diff --git a/owl-bot-staging/google-cloud-ndb/google-cloud-ndb/google-cloud-ndb.txt b/owl-bot-staging/google-cloud-ndb/google-cloud-ndb/google-cloud-ndb.txt new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-ndb/.coveragerc b/packages/google-cloud-ndb/.coveragerc new file mode 100644 index 000000000000..1cee855b5c17 --- /dev/null +++ b/packages/google-cloud-ndb/.coveragerc @@ -0,0 +1,14 @@ +[run] +branch = True + +[report] +fail_under = 100 +show_missing = True +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER +omit = + .nox/* + */gapic/*.py + */proto/*.py + tests/*/*.py diff --git a/packages/google-cloud-ndb/.flake8 b/packages/google-cloud-ndb/.flake8 new file mode 100644 index 000000000000..dc7fc7eed05e --- /dev/null +++ b/packages/google-cloud-ndb/.flake8 @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +[flake8] +ignore = E203, E266, E501, W503 diff --git a/packages/google-cloud-ndb/.gitignore b/packages/google-cloud-ndb/.gitignore new file mode 100644 index 000000000000..63022fac2ea2 --- /dev/null +++ b/packages/google-cloud-ndb/.gitignore @@ -0,0 +1,56 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.tox +.cache +.pytest_cache +htmlcov + +# Translations +*.mo + +# Mac +.DS_Store + +# Mr Developer +.mr.developer.cfg +.project +.pydevproject + +# JetBrains +.idea + +# VS Code +.vscode + +# Built documentation +docs/_build + +# Test logs +coverage.xml +*sponge_log.xml diff --git a/packages/google-cloud-ndb/.librarian/state.yaml b/packages/google-cloud-ndb/.librarian/state.yaml new file mode 100644 index 000000000000..5fb58467177d --- /dev/null +++ b/packages/google-cloud-ndb/.librarian/state.yaml @@ -0,0 +1,10 @@ +image: us-central1-docker.pkg.dev/cloud-sdk-librarian-prod/images-prod/python-librarian-generator@sha256:97c3041de740f26b132d3c5d43f0097f990e8b0d1f2e6707054840024c20ab0c +libraries: + - id: google-cloud-ndb + version: 2.3.4 + apis: [] + source_roots: + - . 
+ preserve_regex: [] + remove_regex: [] + tag_format: v{version} diff --git a/packages/google-cloud-ndb/.repo-metadata.json b/packages/google-cloud-ndb/.repo-metadata.json new file mode 100644 index 000000000000..bb0b0f08281e --- /dev/null +++ b/packages/google-cloud-ndb/.repo-metadata.json @@ -0,0 +1,14 @@ +{ + "name": "python-ndb", + "name_pretty": "NDB Client Library for Google Cloud Datastore", + "client_documentation": "https://googleapis.dev/python/python-ndb/latest", + "issue_tracker": "https://github.com/googleapis/python-ndb/issues", + "release_level": "stable", + "language": "python", + "library_type": "GAPIC_MANUAL", + "repo": "googleapis/python-ndb", + "distribution_name": "google-cloud-ndb", + "default_version": "", + "codeowner_team": "@googleapis/firestore-dpe @googleapis/gcs-sdk-team", + "api_shortname": "datastore" +} diff --git a/packages/google-cloud-ndb/AUTHORS b/packages/google-cloud-ndb/AUTHORS new file mode 100644 index 000000000000..37999415b489 --- /dev/null +++ b/packages/google-cloud-ndb/AUTHORS @@ -0,0 +1,9 @@ +# This is the official list of ndb authors for copyright purposes. +# Names should be added to this file as: +# Name or Organization <email address> +# The email address is not required for organizations. +Google Inc. +Beech Horn +James Morrison +Rodrigo Moraes +Danny Hermes diff --git a/packages/google-cloud-ndb/CHANGELOG.md b/packages/google-cloud-ndb/CHANGELOG.md new file mode 100644 index 000000000000..f1ca170219c7 --- /dev/null +++ b/packages/google-cloud-ndb/CHANGELOG.md @@ -0,0 +1,665 @@ +# Changelog + +[PyPI History][1] + +[1]: https://pypi.org/project/google-cloud-ndb/#history + +## [2.3.4](https://github.com/googleapis/python-ndb/compare/v2.3.3...v2.3.4) (2025-06-11) + + +### Bug Fixes + +* Allow protobuf 6.x, allow redis 6.x ([#1013](https://github.com/googleapis/python-ndb/issues/1013)) ([b3684fe](https://github.com/googleapis/python-ndb/commit/b3684fe46c13b5d39deccc456f544b0f6f473d91)) + +## [2.3.3](https://github.com/googleapis/python-ndb/compare/v2.3.2...v2.3.3) (2025-05-09) + + +### Bug Fixes + +* Support sub-meanings for datastore v2.20.3 ([#1014](https://github.com/googleapis/python-ndb/issues/1014)) ([88f14fa](https://github.com/googleapis/python-ndb/commit/88f14fa462b7f7caf72688374682bb1b7a2d933c)) + +## [2.3.2](https://github.com/googleapis/python-ndb/compare/v2.3.1...v2.3.2) (2024-07-15) + + +### Bug Fixes + +* Allow Protobuf 5.x ([#991](https://github.com/googleapis/python-ndb/issues/991)) ([5812a3c](https://github.com/googleapis/python-ndb/commit/5812a3c2833ef9edda1726645e32789752474bd6)) + +## [2.3.1](https://github.com/googleapis/python-ndb/compare/v2.3.0...v2.3.1) (2024-03-16) + + +### Bug Fixes + +* **grpc:** Fix large payload handling when using the emulator. ([#975](https://github.com/googleapis/python-ndb/issues/975)) ([d9162ae](https://github.com/googleapis/python-ndb/commit/d9162aee709062683bf5f9f01208bd40f46d490a)) +* Remove uses of six. [#913](https://github.com/googleapis/python-ndb/issues/913) ([#958](https://github.com/googleapis/python-ndb/issues/958)) ([e17129a](https://github.com/googleapis/python-ndb/commit/e17129a2114c3f5d45b99cc9a4911b586eb3fafa)) +* Show a non-None error for core_exception.Unknown errors. ([#968](https://github.com/googleapis/python-ndb/issues/968)) ([66e61cc](https://github.com/googleapis/python-ndb/commit/66e61cc578335509d480650906528fa390f44c11)) + + +### Documentation + +* Document how to run system tests against the emulator.
([#963](https://github.com/googleapis/python-ndb/issues/963)) ([47db5b9](https://github.com/googleapis/python-ndb/commit/47db5b9f6ee1fc7c01ad86d476cd8e066fb5cffb)) +* Note to use functools.wraps instead of utils.wrapping. ([#966](https://github.com/googleapis/python-ndb/issues/966)) ([5e9f3d6](https://github.com/googleapis/python-ndb/commit/5e9f3d6977677c20b3447f07bf8bcf4553aac076)) +* Tell users of utils.wrapping to use functools.wraps ([#967](https://github.com/googleapis/python-ndb/issues/967)) ([042645b](https://github.com/googleapis/python-ndb/commit/042645b52608a1c11645dd4b014a90040468b113)) + +## [2.3.0](https://github.com/googleapis/python-ndb/compare/v2.2.2...v2.3.0) (2024-03-01) + + +### Features + +* Add field information when raising validation errors. ([#956](https://github.com/googleapis/python-ndb/issues/956)) ([17caf0b](https://github.com/googleapis/python-ndb/commit/17caf0b5f7d0c4d18522f676c8af990b8ff8462d)) +* Add Python 3.12 ([#949](https://github.com/googleapis/python-ndb/issues/949)) ([b5c8477](https://github.com/googleapis/python-ndb/commit/b5c847783b80071c2dd9e9a3dbf899230c99e64a)) +* Add support for google.cloud.ndb.__version__ ([#929](https://github.com/googleapis/python-ndb/issues/929)) ([42b3f01](https://github.com/googleapis/python-ndb/commit/42b3f0137caed25ac3242435b571155d2d84c78e)) +* Add support for server side NOT_IN filter. ([#957](https://github.com/googleapis/python-ndb/issues/957)) ([f0b0724](https://github.com/googleapis/python-ndb/commit/f0b0724d7e364cc3f3574e77076465657089b09c)) +* Allow queries using server side IN. ([#954](https://github.com/googleapis/python-ndb/issues/954)) ([2646cef](https://github.com/googleapis/python-ndb/commit/2646cef3e2687461174a11c45f29de7b84d1fcdb)) +* Introduce compatibility with native namespace packages ([#933](https://github.com/googleapis/python-ndb/issues/933)) ([ccae387](https://github.com/googleapis/python-ndb/commit/ccae387720a28db2686e69dfe23a2599fc4908f0)) +* Use server side != for queries. ([#950](https://github.com/googleapis/python-ndb/issues/950)) ([106772f](https://github.com/googleapis/python-ndb/commit/106772f031f6c37500a0d463698e59008f9bf19a)) + + +### Bug Fixes + +* Compressed repeated to uncompressed property ([#772](https://github.com/googleapis/python-ndb/issues/772)) ([dab9edf](https://github.com/googleapis/python-ndb/commit/dab9edf0fc161051eb13c296cbe973b3a16b502d)) +* Repeated structured property containing blob property with legacy_data ([#817](https://github.com/googleapis/python-ndb/issues/817)) ([#946](https://github.com/googleapis/python-ndb/issues/946)) ([455f860](https://github.com/googleapis/python-ndb/commit/455f860343ff1b71232dad98cf91415492a899ca)) + + +### Documentation + +* **__init__:** Note that Firestore in Datastore Mode is supported ([#919](https://github.com/googleapis/python-ndb/issues/919)) ([0fa75e7](https://github.com/googleapis/python-ndb/commit/0fa75e71dfc6d56d2c0eaf214a48774b99bb959f)) +* Correct read_consistency docs.
([#948](https://github.com/googleapis/python-ndb/issues/948)) ([7e8481d](https://github.com/googleapis/python-ndb/commit/7e8481db84a6d0b96cf09c38e90f47d6b7847a0b)) +* Fix a mistaken ID description ([#943](https://github.com/googleapis/python-ndb/issues/943)) ([5103813](https://github.com/googleapis/python-ndb/commit/51038139e45807b3a14346ded702fbe202dcfdf2)) +* Show how to use named databases ([#932](https://github.com/googleapis/python-ndb/issues/932)) ([182fe4e](https://github.com/googleapis/python-ndb/commit/182fe4e2d295768aaf016f94cb43b6b1e5572ebd)) + +## [2.2.2](https://github.com/googleapis/python-ndb/compare/v2.2.1...v2.2.2) (2023-09-19) + + +### Documentation + +* **query:** Document deprecation of Query.default_options ([#915](https://github.com/googleapis/python-ndb/issues/915)) ([a656719](https://github.com/googleapis/python-ndb/commit/a656719d8a4f20a8b8dc564a1e3837a2cfb037c4)), closes [#880](https://github.com/googleapis/python-ndb/issues/880) + +## [2.2.1](https://github.com/googleapis/python-ndb/compare/v2.2.0...v2.2.1) (2023-09-15) + + +### Bug Fixes + +* **deps:** Add missing six dependency ([#912](https://github.com/googleapis/python-ndb/issues/912)) ([3b1ffb7](https://github.com/googleapis/python-ndb/commit/3b1ffb7e5cabdadfe2a4be6802adef774eec5ef8)) + + +### Documentation + +* Mark database argument for get_by_id and its async counterpart as ignored ([#905](https://github.com/googleapis/python-ndb/issues/905)) ([b0f4310](https://github.com/googleapis/python-ndb/commit/b0f431048b7b2ebb20e4255340290c7687e27425)) + +## [2.2.0](https://github.com/googleapis/python-ndb/compare/v2.1.1...v2.2.0) (2023-07-26) + + +### Features + +* Named db support ([#882](https://github.com/googleapis/python-ndb/issues/882)) ([f5713b0](https://github.com/googleapis/python-ndb/commit/f5713b0e36e54ef69e9fa7e99975f32870832f65)) + + +### Documentation + +* **query:** Fix Py2-style print statements ([#878](https://github.com/googleapis/python-ndb/issues/878)) ([a3a181a](https://github.com/googleapis/python-ndb/commit/a3a181a427cc292882691d963b30bc78c05c6592)) + +## [2.1.1](https://github.com/googleapis/python-ndb/compare/v2.1.0...v2.1.1) (2023-02-28) + + +### Bug Fixes + +* Query options were not respecting use_cache ([#873](https://github.com/googleapis/python-ndb/issues/873)) ([802d88d](https://github.com/googleapis/python-ndb/commit/802d88d108969cba02437f55e5858556221930f3)), closes [#752](https://github.com/googleapis/python-ndb/issues/752) + + +### Documentation + +* Note that we support Python 3.11 in CONTRIBUTING file ([#872](https://github.com/googleapis/python-ndb/issues/872)) ([982ee5f](https://github.com/googleapis/python-ndb/commit/982ee5f9e768c6f7f5ef19bf6fe9e646e4e08e1f)) +* Use cached versions of Cloud objects.inv files ([#863](https://github.com/googleapis/python-ndb/issues/863)) ([4471e2f](https://github.com/googleapis/python-ndb/commit/4471e2f11757be280266779544c59c90222b8184)), closes [#862](https://github.com/googleapis/python-ndb/issues/862) + +## [2.1.0](https://github.com/googleapis/python-ndb/compare/v2.0.0...v2.1.0) (2022-12-15) + + +### Features + +* Support client_options for clients ([#815](https://github.com/googleapis/python-ndb/issues/815)) ([6f94f40](https://github.com/googleapis/python-ndb/commit/6f94f40dfcd6f10e3cec979e4eb2b83408c66a30)) + + +### Bug Fixes + +* **zlib:** Accommodate different Zlib compression levels ([#852](https://github.com/googleapis/python-ndb/issues/852))
([c1ab83b](https://github.com/googleapis/python-ndb/commit/c1ab83b9581b3d4d10dc7d2508b1c93b14e3c31a)) + +## [2.0.0](https://github.com/googleapis/python-ndb/compare/v1.12.0...v2.0.0) (2022-12-06) + + +### ⚠ BREAKING CHANGES + +* **dependencies:** Upgrade to google-cloud-datastore >= 2.7.2 + +### Features + +* **dependencies:** Upgrade to google-cloud-datastore >= 2.7.2 ([12bbcb5](https://github.com/googleapis/python-ndb/commit/12bbcb548c47803406246d6e3cf55cd947b1500a)) + + +### Bug Fixes + +* Correct access to SerializeToString, CopyFrom, and MergeFromString ([12bbcb5](https://github.com/googleapis/python-ndb/commit/12bbcb548c47803406246d6e3cf55cd947b1500a)) +* Fix enum namespaces ([12bbcb5](https://github.com/googleapis/python-ndb/commit/12bbcb548c47803406246d6e3cf55cd947b1500a)) +* Update API capitalization/casing ([12bbcb5](https://github.com/googleapis/python-ndb/commit/12bbcb548c47803406246d6e3cf55cd947b1500a)) +* Update datastore stub creation ([12bbcb5](https://github.com/googleapis/python-ndb/commit/12bbcb548c47803406246d6e3cf55cd947b1500a)) +* Update module imports ([12bbcb5](https://github.com/googleapis/python-ndb/commit/12bbcb548c47803406246d6e3cf55cd947b1500a)) + +## [1.12.0](https://github.com/googleapis/python-ndb/compare/v1.11.2...v1.12.0) (2022-11-29) + + +### Bug Fixes + +* Drop Python 2 support ([90efd77](https://github.com/googleapis/python-ndb/commit/90efd77633c97f530088dc3f079547ef4eefd796)) +* Drop Python 3.6 support ([#829](https://github.com/googleapis/python-ndb/issues/829)) ([b110199](https://github.com/googleapis/python-ndb/commit/b1101994a34f70804027ea0c8a1b9f276d260756)) +* **model:** Ensure repeated props have same kind when converting from ds ([#824](https://github.com/googleapis/python-ndb/issues/824)) ([29f5a85](https://github.com/googleapis/python-ndb/commit/29f5a853174857545e225fe2f0c682dfa0bc3884)) + + +### Documentation + +* Add note in Django middleware documentation that it is unimplemented ([#805](https://github.com/googleapis/python-ndb/issues/805)) ([aa7621d](https://github.com/googleapis/python-ndb/commit/aa7621dba3b5c32141cdcb1d07829a217bb8b0bd)) +* Add note that ProtoRPC message classes are unimplemented ([#819](https://github.com/googleapis/python-ndb/issues/819)) ([ae813e9](https://github.com/googleapis/python-ndb/commit/ae813e9995d103a45a0c7bc6b4c7bdc148c19c29)) +* **context:** Note that several methods are no longer implemented. 
([#821](https://github.com/googleapis/python-ndb/issues/821)) ([34c2c38](https://github.com/googleapis/python-ndb/commit/34c2c389d02f4692840631d34b6249b88867d725)) +* **CONTRIBUTING:** Note the need for Redis/Memcached env vars in tests ([#838](https://github.com/googleapis/python-ndb/issues/838)) ([19f8415](https://github.com/googleapis/python-ndb/commit/19f84150ab06ae71e25ee48ba7f7285eb0402738)), closes [#836](https://github.com/googleapis/python-ndb/issues/836) +* Fix bad import path in migration guide ([#827](https://github.com/googleapis/python-ndb/issues/827)) ([7b44961](https://github.com/googleapis/python-ndb/commit/7b449615629b5a08836ee17a8ab34eb8efbaed21)) +* Fix typo in begin_transaction docstring ([#822](https://github.com/googleapis/python-ndb/issues/822)) ([7fd3ed3](https://github.com/googleapis/python-ndb/commit/7fd3ed315d39a9a50746b00898b22edd3f7d1d0c)) +* **README:** Synchronize supported version text with python-datastore ([#837](https://github.com/googleapis/python-ndb/issues/837)) ([316f959](https://github.com/googleapis/python-ndb/commit/316f95913f2dca12f314e429bbe8bd2582bc1c0f)) +* **tasklets:** Fix Py2-style print statement ([#840](https://github.com/googleapis/python-ndb/issues/840)) ([0ebfaed](https://github.com/googleapis/python-ndb/commit/0ebfaedc48911b57d0cb23584a2a84c31a92d06a)) + +## [1.11.2](https://github.com/googleapis/python-ndb/compare/v1.11.1...v1.11.2) (2022-06-03) + + +### Documentation + +* fix changelog header to consistent size ([#773](https://github.com/googleapis/python-ndb/issues/773)) ([7bb4e5a](https://github.com/googleapis/python-ndb/commit/7bb4e5a7bf11061a546f21e6f57cf2937f7a3a9d)) + +## [1.11.1](https://www.github.com/googleapis/python-ndb/compare/v1.11.0...v1.11.1) (2021-11-03) + + +### Bug Fixes + +* increase cache lock expiration time ([#740](https://www.github.com/googleapis/python-ndb/issues/740)) ([2634d01](https://www.github.com/googleapis/python-ndb/commit/2634d01ac9d4a73057d5e16cf476c5ecfc8e7fcf)), closes [#728](https://www.github.com/googleapis/python-ndb/issues/728) + +## [1.11.0](https://www.github.com/googleapis/python-ndb/compare/v1.10.5...v1.11.0) (2021-10-28) + + +### Features + +* add support for python 3.10 ([#735](https://www.github.com/googleapis/python-ndb/issues/735)) ([58620c1](https://www.github.com/googleapis/python-ndb/commit/58620c1b17e3a4b3608614bea620e93f39e1bd3a)) + +## [1.10.5](https://www.github.com/googleapis/python-ndb/compare/v1.10.4...v1.10.5) (2021-10-08) + + +### Bug Fixes + +* correct regression in `Model.get_or_insert` ([#731](https://www.github.com/googleapis/python-ndb/issues/731)) ([921ec69](https://www.github.com/googleapis/python-ndb/commit/921ec695e246e548f207b0c6aded7296e4b3b263)), closes [#729](https://www.github.com/googleapis/python-ndb/issues/729) + +## [1.10.4](https://www.github.com/googleapis/python-ndb/compare/v1.10.3...v1.10.4) (2021-09-28) + + +### Bug Fixes + +* pin grpcio / googleapis-common-protos under Python2 ([#725](https://www.github.com/googleapis/python-ndb/issues/725)) ([ccc82e4](https://www.github.com/googleapis/python-ndb/commit/ccc82e42fe2bbb285779a81cff03866facfad667)) + +## [1.10.3](https://www.github.com/googleapis/python-ndb/compare/v1.10.2...v1.10.3) (2021-09-07) + + +### Bug Fixes + +* use thread-safe iterator to generate context ids ([#716](https://www.github.com/googleapis/python-ndb/issues/716)) ([92ec8ac](https://www.github.com/googleapis/python-ndb/commit/92ec8ac7de8cd0f50d6104b9e514b4e933cfbb13)), closes
[#715](https://www.github.com/googleapis/python-ndb/issues/715) + +## [1.10.2](https://www.github.com/googleapis/python-ndb/compare/v1.10.1...v1.10.2) (2021-08-31) + + +### Bug Fixes + +* **deps:** add pytz as an explicit dependency ([#707](https://www.github.com/googleapis/python-ndb/issues/707)) ([6b48548](https://www.github.com/googleapis/python-ndb/commit/6b48548a1ea4b0c125314f907c25b47992ee6556)) + +## [1.10.1](https://www.github.com/googleapis/python-ndb/compare/v1.10.0...v1.10.1) (2021-08-11) + + +### Bug Fixes + +* add rpc request object to debug logging ([#696](https://www.github.com/googleapis/python-ndb/issues/696)) ([45e590a](https://www.github.com/googleapis/python-ndb/commit/45e590a0903e6690a516a1eb35002664eebf540d)), closes [#695](https://www.github.com/googleapis/python-ndb/issues/695) +* allow for legacy repeated structured properties with empty values ([#702](https://www.github.com/googleapis/python-ndb/issues/702)) ([60c293d](https://www.github.com/googleapis/python-ndb/commit/60c293d039721f7e842ac8973a743642e182e4a5)), closes [#694](https://www.github.com/googleapis/python-ndb/issues/694) +* fix bug with concurrent writes to global cache ([#705](https://www.github.com/googleapis/python-ndb/issues/705)) ([bb7cadc](https://www.github.com/googleapis/python-ndb/commit/bb7cadc45df92757b0b2d49c8914a10869d64965)), closes [#692](https://www.github.com/googleapis/python-ndb/issues/692) + +## [1.10.0](https://www.github.com/googleapis/python-ndb/compare/v1.9.0...v1.10.0) (2021-07-20) + + +### Features + +* add 'python_requires' metadata to setup ([#681](https://www.github.com/googleapis/python-ndb/issues/681)) ([e9a09d3](https://www.github.com/googleapis/python-ndb/commit/e9a09d3f0facd29836ccce078575f12e102462c9)) + + +### Bug Fixes + +* fix bug with repeated structured properties with Expando values ([#671](https://www.github.com/googleapis/python-ndb/issues/671)) ([882dff0](https://www.github.com/googleapis/python-ndb/commit/882dff0517be9ddad5814317853ce87bf99d5db0)), closes [#669](https://www.github.com/googleapis/python-ndb/issues/669) +* properly handle legacy structured properties in Expando instances ([#676](https://www.github.com/googleapis/python-ndb/issues/676)) ([70710c8](https://www.github.com/googleapis/python-ndb/commit/70710c83c5ace83504167801da990bc81cb43c89)), closes [#673](https://www.github.com/googleapis/python-ndb/issues/673) +* refactor global cache to address concurrency and fault tolerance issues ([#667](https://www.github.com/googleapis/python-ndb/issues/667)) ([5e2c591](https://www.github.com/googleapis/python-ndb/commit/5e2c591cbd89d8783527252d7f771fba91792602)) + +## [1.9.0](https://www.github.com/googleapis/python-ndb/compare/v1.8.0...v1.9.0) (2021-06-07) + + +### Features + +* don't flush entire global cache on transient errors ([#654](https://www.github.com/googleapis/python-ndb/issues/654)) ([cbf2d7d](https://www.github.com/googleapis/python-ndb/commit/cbf2d7de3d532ce08bd0d25fa18b5226afd216b9)) + + +### Bug Fixes + +* correct inconsistent behavior with regards to namespaces ([#662](https://www.github.com/googleapis/python-ndb/issues/662)) ([cf21a28](https://www.github.com/googleapis/python-ndb/commit/cf21a285e784019f9ba0f2a89a7acc4105fdcd2a)), closes [#661](https://www.github.com/googleapis/python-ndb/issues/661) +* correctly decode falsy values in legacy protocol buffers ([#628](https://www.github.com/googleapis/python-ndb/issues/628)) ([69a9f63](https://www.github.com/googleapis/python-ndb/commit/69a9f63be89ca50bbf0a42d0565a9f1fdcf6d143)), closes 
[#625](https://www.github.com/googleapis/python-ndb/issues/625) +* defer clearing global cache when in transaction ([#660](https://www.github.com/googleapis/python-ndb/issues/660)) ([73020ed](https://www.github.com/googleapis/python-ndb/commit/73020ed8f8eb1430f87be4b5680690d9e373c846)) +* detect cache write failure for `MemcacheCache` ([#665](https://www.github.com/googleapis/python-ndb/issues/665)) ([5d7f163](https://www.github.com/googleapis/python-ndb/commit/5d7f163988c6e8c43579aae616d275db4ca4ff45)), closes [#656](https://www.github.com/googleapis/python-ndb/issues/656) +* do not set read_consistency for queries. ([#664](https://www.github.com/googleapis/python-ndb/issues/664)) ([36a5b55](https://www.github.com/googleapis/python-ndb/commit/36a5b55b1b21d7333923edd4a42d1a32fd453dfa)), closes [#666](https://www.github.com/googleapis/python-ndb/issues/666) +* limit memcache keys to 250 bytes ([#663](https://www.github.com/googleapis/python-ndb/issues/663)) ([7dc11df](https://www.github.com/googleapis/python-ndb/commit/7dc11df00fc15392fde61e828e1445eb9e66a1ac)), closes [#619](https://www.github.com/googleapis/python-ndb/issues/619) +* properly handle error when clearing cache ([#636](https://www.github.com/googleapis/python-ndb/issues/636)) ([d0ffcf3](https://www.github.com/googleapis/python-ndb/commit/d0ffcf3517fe357d6689943265b829258c397d93)), closes [#633](https://www.github.com/googleapis/python-ndb/issues/633) +* retry connection errors with memcache ([#645](https://www.github.com/googleapis/python-ndb/issues/645)) ([06b466a](https://www.github.com/googleapis/python-ndb/commit/06b466a8421ff7a5586164bf4deb43d6bcbf0ef4)), closes [#620](https://www.github.com/googleapis/python-ndb/issues/620) +* support ordering by key for multi queries ([#630](https://www.github.com/googleapis/python-ndb/issues/630)) ([508d8cb](https://www.github.com/googleapis/python-ndb/commit/508d8cb8c65afe5e885c1fdba4dce933d52cfd4b)), closes [#629](https://www.github.com/googleapis/python-ndb/issues/629) + +## [1.8.0](https://www.github.com/googleapis/python-ndb/compare/v1.7.3...v1.8.0) (2021-04-06) + + +### Features + +* retry global cache operations on transient errors ([#603](https://www.github.com/googleapis/python-ndb/issues/603)) ([5d6b650](https://www.github.com/googleapis/python-ndb/commit/5d6b6503ce40ba0d36ea79a461c2c95897235734)), closes [#601](https://www.github.com/googleapis/python-ndb/issues/601) + + +### Bug Fixes + +* don't return `None` for entities found in queries ([#612](https://www.github.com/googleapis/python-ndb/issues/612)) ([9e5e255](https://www.github.com/googleapis/python-ndb/commit/9e5e255c14716b3046a9dc70bb8a4596beec1562)), closes [#586](https://www.github.com/googleapis/python-ndb/issues/586) +* fix bug with compressed blob property ([#615](https://www.github.com/googleapis/python-ndb/issues/615)) ([d305f9f](https://www.github.com/googleapis/python-ndb/commit/d305f9fd2b1cfe8e7d709849e392402f4ae059ac)), closes [#602](https://www.github.com/googleapis/python-ndb/issues/602) +* fix failing unit test ([#607](https://www.github.com/googleapis/python-ndb/issues/607)) ([5d3927e](https://www.github.com/googleapis/python-ndb/commit/5d3927e0b0a6d6a447585d2cc90077de26f24c5c)), closes [#606](https://www.github.com/googleapis/python-ndb/issues/606) +* handle unpickling between GAE NDB (2.7) to Cloud NDB (3) ([#596](https://www.github.com/googleapis/python-ndb/issues/596)) ([5be4225](https://www.github.com/googleapis/python-ndb/commit/5be4225f20b9216b49f953c464b8b8ef9683d8bf)) +* mock call to 
`tasklets.sleep` in unit test ([#609](https://www.github.com/googleapis/python-ndb/issues/609)) ([00e23f3](https://www.github.com/googleapis/python-ndb/commit/00e23f3f31fb531b402f087e29b539a7af9ac79f)), closes [#608](https://www.github.com/googleapis/python-ndb/issues/608) +* prevent mismatch error when using default namespace on ancestor queries ([#614](https://www.github.com/googleapis/python-ndb/issues/614)) ([ae67f04](https://www.github.com/googleapis/python-ndb/commit/ae67f04db12c65ecca9d6145f113729072b952f3)) +* reimplement `_clone_properties` ([#610](https://www.github.com/googleapis/python-ndb/issues/610)) ([e23f42b](https://www.github.com/googleapis/python-ndb/commit/e23f42b27cec6f7fcf05ae51d4e6ee2aea30f6ca)), closes [#566](https://www.github.com/googleapis/python-ndb/issues/566) +* replicate legacy behavior for using cache with queries ([#613](https://www.github.com/googleapis/python-ndb/issues/613)) ([edd1185](https://www.github.com/googleapis/python-ndb/commit/edd1185f01c6db5b4876f7b0ce81df0315c98890)), closes [#586](https://www.github.com/googleapis/python-ndb/issues/586) +* support `int` as base type for `BooleanProperty` ([#624](https://www.github.com/googleapis/python-ndb/issues/624)) ([a04bf3a](https://www.github.com/googleapis/python-ndb/commit/a04bf3acef3eb88f23c4f0832ce74af9557cb03d)) + +## [1.7.3](https://www.github.com/googleapis/python-ndb/compare/v1.7.2...v1.7.3) (2021-01-21) + + +### Bug Fixes + +* handle negatives in protobuf deserialization ([#591](https://www.github.com/googleapis/python-ndb/issues/591)) ([0d3d3ca](https://www.github.com/googleapis/python-ndb/commit/0d3d3ca99df10a3d6e1c6f31ee719faa373ccacf)), closes [#590](https://www.github.com/googleapis/python-ndb/issues/590) +* make nested retry blocks work for RPC calls ([#589](https://www.github.com/googleapis/python-ndb/issues/589)) ([f125459](https://www.github.com/googleapis/python-ndb/commit/f125459d4eef05861776ccefd29d137a5f22e240)) + + +### Documentation + +* correct documentation for `GlobalCache` ([#565](https://www.github.com/googleapis/python-ndb/issues/565)) ([be5b157](https://www.github.com/googleapis/python-ndb/commit/be5b1571e8e30bd1d736ae5d77b3017473b1a373)) +* fix return type in fetch docstring ([#594](https://www.github.com/googleapis/python-ndb/issues/594)) ([9eb15f4](https://www.github.com/googleapis/python-ndb/commit/9eb15f4ff75204ad25f943dbc1e85c227d88faf6)), closes [#576](https://www.github.com/googleapis/python-ndb/issues/576) +* fix typo in example code ([#588](https://www.github.com/googleapis/python-ndb/issues/588)) ([76fab49](https://www.github.com/googleapis/python-ndb/commit/76fab49f9d08a2add4135c011d08ff24f04549b2)) + +## [1.7.2](https://www.github.com/googleapis/python-ndb/compare/v1.7.1...v1.7.2) (2020-12-16) + + +### Bug Fixes + +* always use brute-force counting with Datastore emulator and clean up related hacks ([#585](https://www.github.com/googleapis/python-ndb/issues/585)) ([8480a8b](https://www.github.com/googleapis/python-ndb/commit/8480a8bd0d169e2499ee62d1fb9d140aa6ce00d4)) +* return a tuple when empty result returned on query ([#582](https://www.github.com/googleapis/python-ndb/issues/582)) ([7cf0e87](https://www.github.com/googleapis/python-ndb/commit/7cf0e878054dbfe7bc8b6c0c9fea96a602e8e859)) +* support empty not_finished messages that cause query.count() to return early ([#580](https://www.github.com/googleapis/python-ndb/issues/580)) ([fc31553](https://www.github.com/googleapis/python-ndb/commit/fc31553c77f6e7865df0efd4c820f69366f6607c)), closes 
[#575](https://www.github.com/googleapis/python-ndb/issues/575)) + + +### Documentation + +* Add urlsafe() info to migration notes ([#579](https://www.github.com/googleapis/python-ndb/issues/579)) ([9df2f9f](https://www.github.com/googleapis/python-ndb/commit/9df2f9f8be40d95fbde297335eb99b19bafad583)) + +## [1.7.1](https://www.github.com/googleapis/python-ndb/compare/v1.7.0...v1.7.1) (2020-11-11) + + +### Bug Fixes + +* **dependencies:** Pin to less than 2.0.0 for google-cloud-datastore ([#569](https://www.github.com/googleapis/python-ndb/issues/569)) ([c8860a6](https://www.github.com/googleapis/python-ndb/commit/c8860a6541f638fb458b74cfdffc1ddb7b035549)), closes [#568](https://www.github.com/googleapis/python-ndb/issues/568) + +## [1.7.0](https://www.github.com/googleapis/python-ndb/compare/v1.6.1...v1.7.0) (2020-10-22) + + +### Features + +* fault tolerance for global caches ([#560](https://www.github.com/googleapis/python-ndb/issues/560)) ([8ab8ee0](https://www.github.com/googleapis/python-ndb/commit/8ab8ee01f5577cfe468ed77d3cd48d6f6b816b0e)), closes [#557](https://www.github.com/googleapis/python-ndb/issues/557) +* Transaction propagation using ndb.TransactionOptions ([#537](https://www.github.com/googleapis/python-ndb/issues/537)) ([f3aa027](https://www.github.com/googleapis/python-ndb/commit/f3aa027d7d55d9aee9a72ce23cebc26a5975bb28)) + +## [1.6.1](https://www.github.com/googleapis/python-ndb/compare/v1.6.0...v1.6.1) (2020-10-08) + + +### Bug Fixes + +* `@non_transactional` decorator was not working correctly with async ([#554](https://www.github.com/googleapis/python-ndb/issues/554)) ([758c8e6](https://www.github.com/googleapis/python-ndb/commit/758c8e66314da4cb1f077e9fbe8cf1ae09bccd4e)), closes [#552](https://www.github.com/googleapis/python-ndb/issues/552) +* fix a connection leak in RedisCache ([#556](https://www.github.com/googleapis/python-ndb/issues/556)) ([47ae172](https://www.github.com/googleapis/python-ndb/commit/47ae172edc435a49d25687d83747afff153b59d2)) +* get_by_id and get_or_insert should use default namespace when passed in ([#542](https://www.github.com/googleapis/python-ndb/issues/542)) ([3674650](https://www.github.com/googleapis/python-ndb/commit/3674650a7ba1a1dd7a72b728f343f623f660ba6a)), closes [#535](https://www.github.com/googleapis/python-ndb/issues/535) + + +### Documentation + +* address docs builds and memcached customization to docker file ([#548](https://www.github.com/googleapis/python-ndb/issues/548)) ([88e7e24](https://www.github.com/googleapis/python-ndb/commit/88e7e244854acb2409c324855deb9229f33a44fd)) +* update docker image used for docs generation [#549](https://www.github.com/googleapis/python-ndb/issues/549) ([5e8bf57](https://www.github.com/googleapis/python-ndb/commit/5e8bf57508e3b995f51dcc3171e5ea77c4bc4484)) + +## [1.6.0](https://www.github.com/googleapis/python-ndb/compare/v1.5.2...v1.6.0) (2020-09-14) + + +### Features + +* memcached integration ([#536](https://www.github.com/googleapis/python-ndb/issues/536)) ([2bd43da](https://www.github.com/googleapis/python-ndb/commit/2bd43dabbd6b6fbffbb4390520e47ae06262c858)) + +## [1.5.2](https://www.github.com/googleapis/python-ndb/compare/v1.5.1...v1.5.2) (2020-09-03) + + +### Bug Fixes + +* avoid kind error when using subclasses in local structured properties ([#531](https://www.github.com/googleapis/python-ndb/issues/531)) ([49f9e48](https://www.github.com/googleapis/python-ndb/commit/49f9e48a7d8bf9c3c8cc8a30ae385bcbcb95dbaa)) +* fix bug when setting naive datetime on
`DateTimeProperty` with timezone ([#534](https://www.github.com/googleapis/python-ndb/issues/534)) ([ad42606](https://www.github.com/googleapis/python-ndb/commit/ad426063257f8633bb4207a77b29b35fc0173ec1)), closes [#517](https://www.github.com/googleapis/python-ndb/issues/517) +* make optimized `Query.count()` work with the datastore emulator ([#528](https://www.github.com/googleapis/python-ndb/issues/528)) ([e5df1e3](https://www.github.com/googleapis/python-ndb/commit/e5df1e37c97fc0765f8f95ada6d4dadd7b4bb445)), closes [#525](https://www.github.com/googleapis/python-ndb/issues/525) +* make sure `keys_only` ordered multiquery returns keys not entities ([#527](https://www.github.com/googleapis/python-ndb/issues/527)) ([2078dc1](https://www.github.com/googleapis/python-ndb/commit/2078dc1c2239299729d8ecade2e3592f49bc65db)), closes [#526](https://www.github.com/googleapis/python-ndb/issues/526) + + +### Documentation + +* fix type hint for urlsafe ([#532](https://www.github.com/googleapis/python-ndb/issues/532)) ([87a3475](https://www.github.com/googleapis/python-ndb/commit/87a347536b459c461a02c401b8a8c097e276d3ea)), closes [#529](https://www.github.com/googleapis/python-ndb/issues/529) + +## [1.5.1](https://www.github.com/googleapis/python-ndb/compare/v1.5.0...v1.5.1) (2020-08-28) + + +### Bug Fixes + +* fix exception handling bug in tasklets ([#520](https://www.github.com/googleapis/python-ndb/issues/520)) ([fc0366a](https://www.github.com/googleapis/python-ndb/commit/fc0366a9db9fa5263533631cb08ccb5be07960ad)), closes [#519](https://www.github.com/googleapis/python-ndb/issues/519) +* fix format exceptions in `utils.logging_debug` ([#514](https://www.github.com/googleapis/python-ndb/issues/514)) ([d38c0a3](https://www.github.com/googleapis/python-ndb/commit/d38c0a36dac1dc183d344a08050815010b256638)), closes [#508](https://www.github.com/googleapis/python-ndb/issues/508) +* transparently add sort properties to projection for multiqueries ([#511](https://www.github.com/googleapis/python-ndb/issues/511)) ([4e46327](https://www.github.com/googleapis/python-ndb/commit/4e463273a36b5fe69f87d429260fba1a690d55b9)), closes [#509](https://www.github.com/googleapis/python-ndb/issues/509) + +## [1.5.0](https://www.github.com/googleapis/python-ndb/compare/v1.4.2...v1.5.0) (2020-08-12) + + +### Features + +* use contextvars.ContextVar instead of threading.local in Python 3 ([4c634f3](https://www.github.com/googleapis/python-ndb/commit/4c634f348f8847fda139fe469e0e8adfabfd649a)), closes [#504](https://www.github.com/googleapis/python-ndb/issues/504) + + +### Bug Fixes + +* fix concurrency bug in redis cache implementation ([#503](https://www.github.com/googleapis/python-ndb/issues/503)) ([6c18b95](https://www.github.com/googleapis/python-ndb/commit/6c18b9522e83e5e599a491c6ed287de2d7cdf089)), closes [#496](https://www.github.com/googleapis/python-ndb/issues/496) +* support polymodel in local structured property ([#497](https://www.github.com/googleapis/python-ndb/issues/497)) ([9ccbdd2](https://www.github.com/googleapis/python-ndb/commit/9ccbdd23448dcb401b111f03e951fa89ae65174f)), closes [#481](https://www.github.com/googleapis/python-ndb/issues/481) + +## [1.4.2](https://www.github.com/googleapis/python-ndb/compare/v1.4.1...v1.4.2) (2020-07-30) + + +### Bug Fixes + +* include ancestors in `Key.to_legacy_urlsafe` ([#494](https://www.github.com/googleapis/python-ndb/issues/494)) ([0f29190](https://www.github.com/googleapis/python-ndb/commit/0f2919070ef78a17988fb5cae573a1514ff63926)), closes
[#478](https://www.github.com/googleapis/python-ndb/issues/478) +* properly handle explicitly passing default namespace ([#488](https://www.github.com/googleapis/python-ndb/issues/488)) ([3c64483](https://www.github.com/googleapis/python-ndb/commit/3c644838a499f54620c6a12773f8cdd1c245096f)), closes [#476](https://www.github.com/googleapis/python-ndb/issues/476) + +## [1.4.1](https://www.github.com/googleapis/python-ndb/compare/v1.4.0...v1.4.1) (2020-07-10) + + +### Bug Fixes + +* do not disclose cache contents in stack traces ([#485](https://www.github.com/googleapis/python-ndb/issues/485)) ([2d2c5a2](https://www.github.com/googleapis/python-ndb/commit/2d2c5a2004629b807f296f74648c789c6ce9a6ba)), closes [#482](https://www.github.com/googleapis/python-ndb/issues/482) + +## [1.4.0](https://www.github.com/googleapis/python-ndb/compare/v1.3.0...v1.4.0) (2020-07-01) + + +### Features + +* allow `Query.fetch_page` for queries with post filters ([#463](https://www.github.com/googleapis/python-ndb/issues/463)) ([632435c](https://www.github.com/googleapis/python-ndb/commit/632435c155f565f5e7b45ab08680613599994f0e)), closes [#270](https://www.github.com/googleapis/python-ndb/issues/270) +* record time spent waiting on rpc calls ([#472](https://www.github.com/googleapis/python-ndb/issues/472)) ([1629805](https://www.github.com/googleapis/python-ndb/commit/16298057c96921a3c995e9ddded36d37fc90819f)) + + +### Bug Fixes + +* ignore datastore properties that are not mapped to NDB properties ([#470](https://www.github.com/googleapis/python-ndb/issues/470)) ([ab460fa](https://www.github.com/googleapis/python-ndb/commit/ab460fad8ded5b3b550359253e90a6b189145842)), closes [#461](https://www.github.com/googleapis/python-ndb/issues/461) +* make sure `tests` package is not included in distribution ([#469](https://www.github.com/googleapis/python-ndb/issues/469)) ([5a20d0a](https://www.github.com/googleapis/python-ndb/commit/5a20d0af6c6c1c2d10e9e42a35a5b58fa952547c)), closes [#468](https://www.github.com/googleapis/python-ndb/issues/468) +* retry grpc `UNKNOWN` errors ([#458](https://www.github.com/googleapis/python-ndb/issues/458)) ([5d354e4](https://www.github.com/googleapis/python-ndb/commit/5d354e4b4247372f2ffdc9caa2df1516ce97ff8d)), closes [#310](https://www.github.com/googleapis/python-ndb/issues/310) + +## [1.3.0](https://www.github.com/googleapis/python-ndb/compare/v1.2.1...v1.3.0) (2020-06-01) + + +### Features + +* add templates for python samples projects ([#506](https://www.github.com/googleapis/python-ndb/issues/506)) ([#455](https://www.github.com/googleapis/python-ndb/issues/455)) ([e329276](https://www.github.com/googleapis/python-ndb/commit/e32927623645112513675fbbfe5884a63eac24e1)) +* convert grpc errors to api core exceptions ([#457](https://www.github.com/googleapis/python-ndb/issues/457)) ([042cf6c](https://www.github.com/googleapis/python-ndb/commit/042cf6ceabe2a47b2fe77501ccd618e64877886a)), closes [#416](https://www.github.com/googleapis/python-ndb/issues/416) + + +### Bug Fixes + +* Add support for 'name' Key instances to to_legacy_urlsafe ([#420](https://www.github.com/googleapis/python-ndb/issues/420)) ([59fc5af](https://www.github.com/googleapis/python-ndb/commit/59fc5afc36d01b72ad4b53befa593803b55df8b3)) +* all query types should use cache if available ([#454](https://www.github.com/googleapis/python-ndb/issues/454)) ([69b3a0a](https://www.github.com/googleapis/python-ndb/commit/69b3a0ae49ab446a9ed903646ae6e01690411d3e)), closes 
[#441](https://www.github.com/googleapis/python-ndb/issues/441) +* fix `NotImplementedError` for `get_or_insert` inside a transaction ([#451](https://www.github.com/googleapis/python-ndb/issues/451)) ([99aa403](https://www.github.com/googleapis/python-ndb/commit/99aa40358b469be1c8486c84ba5873929715f25e)), closes [#433](https://www.github.com/googleapis/python-ndb/issues/433) +* make sure datastore key constructor never gets None in a pair ([#446](https://www.github.com/googleapis/python-ndb/issues/446)) ([e6173cf](https://www.github.com/googleapis/python-ndb/commit/e6173cf8feec866c365d35e7cb461f72d19544fa)), closes [#384](https://www.github.com/googleapis/python-ndb/issues/384) [#439](https://www.github.com/googleapis/python-ndb/issues/439) +* refactor transactions to use their own event loops ([#443](https://www.github.com/googleapis/python-ndb/issues/443)) ([7590be8](https://www.github.com/googleapis/python-ndb/commit/7590be8233fe58f9c45076eb38c1995363f02362)), closes [#426](https://www.github.com/googleapis/python-ndb/issues/426) [#426](https://www.github.com/googleapis/python-ndb/issues/426) +* respect `_code_name` in `StructuredProperty.__getattr__` ([#453](https://www.github.com/googleapis/python-ndb/issues/453)) ([4f54dfc](https://www.github.com/googleapis/python-ndb/commit/4f54dfcee91b15d45cc6046f6b9933d1593d0956)), closes [#449](https://www.github.com/googleapis/python-ndb/issues/449) +* strip `order_by` option from query when using `count()` ([#452](https://www.github.com/googleapis/python-ndb/issues/452)) ([9d20a2d](https://www.github.com/googleapis/python-ndb/commit/9d20a2d5d75cc0590c4326019ea94159bb4aebe2)), closes [#447](https://www.github.com/googleapis/python-ndb/issues/447) + +## [1.2.1](https://www.github.com/googleapis/python-ndb/compare/v1.2.0...v1.2.1) (2020-05-15) + + +### Features + +* Improve custom validators ([#408](https://www.github.com/googleapis/python-ndb/issues/408)) ([5b6cdd6](https://www.github.com/googleapis/python-ndb/commit/5b6cdd627dfce3e5b987c2ecd945d39b5056aa37)), closes [#252](https://www.github.com/googleapis/python-ndb/issues/252) + + +### Bug Fixes + +* clear context cache on rollback ([#410](https://www.github.com/googleapis/python-ndb/issues/410)) ([aa17986](https://www.github.com/googleapis/python-ndb/commit/aa17986759f32ea16c340961d70fbc8fc123b244)), closes [#398](https://www.github.com/googleapis/python-ndb/issues/398) +* do not allow empty key parts for key constructor in namespaced model ([#401](https://www.github.com/googleapis/python-ndb/issues/401)) ([f3528b3](https://www.github.com/googleapis/python-ndb/commit/f3528b3e51c93c762c4e31eed76a1b2f06be84e1)), closes [#384](https://www.github.com/googleapis/python-ndb/issues/384) +* don't rely on duck typing for `_retry.is_transient_error` ([#425](https://www.github.com/googleapis/python-ndb/issues/425)) ([4524542](https://www.github.com/googleapis/python-ndb/commit/4524542e5f6da1af047d86fee3d48cf65ea75508)), closes [#415](https://www.github.com/googleapis/python-ndb/issues/415) +* handle empty batches from Firestore ([#396](https://www.github.com/googleapis/python-ndb/issues/396)) ([1a054ca](https://www.github.com/googleapis/python-ndb/commit/1a054cadff07074de9395cb99ae2c40f987aed2e)), closes [#386](https://www.github.com/googleapis/python-ndb/issues/386) +* make sure reads happen in transaction if there is a transaction ([#395](https://www.github.com/googleapis/python-ndb/issues/395)) ([f32644f](https://www.github.com/googleapis/python-ndb/commit/f32644fcf8c16dc0fd74e14108d7955effff1771)), 
closes [#394](https://www.github.com/googleapis/python-ndb/issues/394) +* more should be boolean in fetch_page call ([#423](https://www.github.com/googleapis/python-ndb/issues/423)) ([a69ffd2](https://www.github.com/googleapis/python-ndb/commit/a69ffd21aaaa881f5e8e54339fd62a1b02d19c4b)), closes [#422](https://www.github.com/googleapis/python-ndb/issues/422) +* support same options in model.query as query ([#407](https://www.github.com/googleapis/python-ndb/issues/407)) ([d08019f](https://www.github.com/googleapis/python-ndb/commit/d08019fbecb0f018987267b01929a21e97b418e2)) +* uniform handling of `projection` argument ([#428](https://www.github.com/googleapis/python-ndb/issues/428)) ([2b65c04](https://www.github.com/googleapis/python-ndb/commit/2b65c04e72a66062e2c792b5b1fb067fb935987f)), closes [#379](https://www.github.com/googleapis/python-ndb/issues/379) +* use `skipped_results` from query results to adjust offset ([#399](https://www.github.com/googleapis/python-ndb/issues/399)) ([6d1452d](https://www.github.com/googleapis/python-ndb/commit/6d1452d977f3f030ff65d5cbb3e593c0789e6c14)), closes [#392](https://www.github.com/googleapis/python-ndb/issues/392) +* use fresh context cache for each transaction ([#409](https://www.github.com/googleapis/python-ndb/issues/409)) ([5109b91](https://www.github.com/googleapis/python-ndb/commit/5109b91425e917727973079020dc51c2b8fddf53)), closes [#394](https://www.github.com/googleapis/python-ndb/issues/394) +* use true `keys_only` query for `Query.count()` ([#405](https://www.github.com/googleapis/python-ndb/issues/405)) ([88184c3](https://www.github.com/googleapis/python-ndb/commit/88184c312dd7bdc7bd36ec58fd53e3fd5001d7ac)), closes [#400](https://www.github.com/googleapis/python-ndb/issues/400) [#404](https://www.github.com/googleapis/python-ndb/issues/404) + +## [1.2.0](https://www.github.com/googleapis/python-ndb/compare/v1.1.2...v1.2.0) (2020-04-20) + + +### Features + +* add `namespace` property to `context.Context` ([#388](https://www.github.com/googleapis/python-ndb/issues/388)) ([34bac15](https://www.github.com/googleapis/python-ndb/commit/34bac153bcc191857715a8760671acaf4fd12706)), closes [#385](https://www.github.com/googleapis/python-ndb/issues/385) +* new `join` argument for `transaction` and related functions ([#381](https://www.github.com/googleapis/python-ndb/issues/381)) ([2c91685](https://www.github.com/googleapis/python-ndb/commit/2c916851d088b650a5d643dc322a4919f456fe05)), closes [#366](https://www.github.com/googleapis/python-ndb/issues/366) + + +### Bug Fixes + +* accept `bytes` or `str` as base value for `JsonProperty` ([#380](https://www.github.com/googleapis/python-ndb/issues/380)) ([e7a0c7c](https://www.github.com/googleapis/python-ndb/commit/e7a0c7c8fb7d80f009442f759abadbd336c0c828)), closes [#378](https://www.github.com/googleapis/python-ndb/issues/378) +* add `ABORTED` to retryable status codes ([#391](https://www.github.com/googleapis/python-ndb/issues/391)) ([183c0c3](https://www.github.com/googleapis/python-ndb/commit/183c0c33a4429ad6bdaa9f141a8ac88ad4e3544d)), closes [#383](https://www.github.com/googleapis/python-ndb/issues/383) +* add missing _get_for_dict method ([#368](https://www.github.com/googleapis/python-ndb/issues/368)) ([55b80ff](https://www.github.com/googleapis/python-ndb/commit/55b80ffa086568e8f820f9ab304952bc39383bd8)), closes [#367](https://www.github.com/googleapis/python-ndb/issues/367) +* empty Entities for optional LocalStructuredProperty fields 
([#370](https://www.github.com/googleapis/python-ndb/issues/370)) ([27a0969](https://www.github.com/googleapis/python-ndb/commit/27a0969982013b37d3f6d8785c3ad127788661f9)), closes [#369](https://www.github.com/googleapis/python-ndb/issues/369) +* return type in DateTimeProperty._to_base_type docstring ([#371](https://www.github.com/googleapis/python-ndb/issues/371)) ([0c549c8](https://www.github.com/googleapis/python-ndb/commit/0c549c89ff78554c4a4dde40973b503aa741422f)) + +## [1.1.2](https://www.github.com/googleapis/python-ndb/compare/v1.1.1...v1.1.2) (2020-03-16) + + +### Bug Fixes + +* check for legacy local structured property values ([#365](https://www.github.com/googleapis/python-ndb/issues/365)) ([f81f406](https://www.github.com/googleapis/python-ndb/commit/f81f406d8e1059121341828836fce2aae5782fca)), closes [#359](https://www.github.com/googleapis/python-ndb/issues/359) +* move stub (grpc communication channel) to client ([#362](https://www.github.com/googleapis/python-ndb/issues/362)) ([90e0625](https://www.github.com/googleapis/python-ndb/commit/90e06252df25fa2ce199543e7b01b17ec284aaf1)), closes [#343](https://www.github.com/googleapis/python-ndb/issues/343) + +## [1.1.1](https://www.github.com/googleapis/python-ndb/compare/v1.1.0...v1.1.1) (2020-03-05) + + +### Bug Fixes + +* fix bug with `yield` of empty list in tasklets ([#354](https://www.github.com/googleapis/python-ndb/issues/354)) ([2d60ebf](https://www.github.com/googleapis/python-ndb/commit/2d60ebfe656abd75f6b9303550b2e03c2cbd79b7)), closes [#353](https://www.github.com/googleapis/python-ndb/issues/353) +* LocalStructuredProperty keep_keys ([#355](https://www.github.com/googleapis/python-ndb/issues/355)) ([9ff1b3d](https://www.github.com/googleapis/python-ndb/commit/9ff1b3de817da50b58a6aed574d7e2f2dcf92310)) +* support nested sequences in parallel `yield` for tasklets ([#358](https://www.github.com/googleapis/python-ndb/issues/358)) ([8c91e7a](https://www.github.com/googleapis/python-ndb/commit/8c91e7ae8262f355a9eafe9051b3c1ef19d4c7cd)), closes [#349](https://www.github.com/googleapis/python-ndb/issues/349) + +## [1.1.0](https://www.github.com/googleapis/python-ndb/compare/v1.0.1...v1.1.0) (2020-03-02) + + +### Features + +* `Key.to_legacy_urlsafe()` ([#348](https://www.github.com/googleapis/python-ndb/issues/348)) ([ab10e3c](https://www.github.com/googleapis/python-ndb/commit/ab10e3c4998b8995d5a057163ce8d9dc8992111a)) + + +### Bug Fixes + +* allow legacy ndb to read LocalStructuredProperty entities. 
([#344](https://www.github.com/googleapis/python-ndb/issues/344)) ([7b07692](https://www.github.com/googleapis/python-ndb/commit/7b0769236841cea1e864ae1e928a7b7021d300dc)) +* fix delete in transaction ([#333](https://www.github.com/googleapis/python-ndb/issues/333)) ([5c162f4](https://www.github.com/googleapis/python-ndb/commit/5c162f4337b837f7125b1fb03f8cff5fb1b4a356)), closes [#271](https://www.github.com/googleapis/python-ndb/issues/271) +* make sure ``key.Key`` uses namespace from client when not specified ([#339](https://www.github.com/googleapis/python-ndb/issues/339)) ([44f02e4](https://www.github.com/googleapis/python-ndb/commit/44f02e46deef245f4d1ae80f9d2e4edd46ecd265)), closes [#337](https://www.github.com/googleapis/python-ndb/issues/337) +* properly exclude from indexes non-indexed subproperties of structured properties ([#346](https://www.github.com/googleapis/python-ndb/issues/346)) ([dde6b85](https://www.github.com/googleapis/python-ndb/commit/dde6b85897457cef7a1080690df5cfae9cb6c31e)), closes [#341](https://www.github.com/googleapis/python-ndb/issues/341) +* resurrect support for compressed text property ([#342](https://www.github.com/googleapis/python-ndb/issues/342)) ([5a86456](https://www.github.com/googleapis/python-ndb/commit/5a864563dc6e155b73e2ac35af6519823c356e19)), closes [#277](https://www.github.com/googleapis/python-ndb/issues/277) +* use correct name when reading legacy structured properties with names ([#347](https://www.github.com/googleapis/python-ndb/issues/347)) ([01d1256](https://www.github.com/googleapis/python-ndb/commit/01d1256e9d41c20bb5836067455c4be4abe1c516)), closes [#345](https://www.github.com/googleapis/python-ndb/issues/345) + +## [1.0.1](https://www.github.com/googleapis/python-ndb/compare/v1.0.0...v1.0.1) (2020-02-11) + + +### Bug Fixes + +* attempt to have fewer transient errors in continuous integration ([#328](https://www.github.com/googleapis/python-ndb/issues/328)) ([0484c7a](https://www.github.com/googleapis/python-ndb/commit/0484c7abf5a1529db5fecf17ebdf0252eab8449e)) +* correct migration doc ([#313](https://www.github.com/googleapis/python-ndb/issues/313)) ([#317](https://www.github.com/googleapis/python-ndb/issues/317)) ([efce24f](https://www.github.com/googleapis/python-ndb/commit/efce24f16a877aecf78264946c22a2c9e3e97f53)) +* disuse `__slots__` in most places ([#330](https://www.github.com/googleapis/python-ndb/issues/330)) ([a8b723b](https://www.github.com/googleapis/python-ndb/commit/a8b723b992e7a91860f6a73c0ee0fd7071e574d3)), closes [#311](https://www.github.com/googleapis/python-ndb/issues/311) +* don't set key on structured property entities ([#312](https://www.github.com/googleapis/python-ndb/issues/312)) ([63f3d94](https://www.github.com/googleapis/python-ndb/commit/63f3d943001d77c1ea0eb9b719e71ecff4eb5dd6)), closes [#281](https://www.github.com/googleapis/python-ndb/issues/281) +* fix race condition in remote calls ([#329](https://www.github.com/googleapis/python-ndb/issues/329)) ([f550510](https://www.github.com/googleapis/python-ndb/commit/f5505100f065e71a14714369d8aef1f7b06ee838)), closes [#302](https://www.github.com/googleapis/python-ndb/issues/302) +* make query options convert projection properties to strings ([#325](https://www.github.com/googleapis/python-ndb/issues/325)) ([d1a4800](https://www.github.com/googleapis/python-ndb/commit/d1a4800c5f53490e6956c11797bd3472ea404b5b)) +* use multiple batches of limited size for large operations ([#321](https://www.github.com/googleapis/python-ndb/issues/321)) 
([8e69453](https://www.github.com/googleapis/python-ndb/commit/8e6945377a4635632d0c35b7a41daebe501d4f0f)), closes [#318](https://www.github.com/googleapis/python-ndb/issues/318) +* use six string_types and integer_types for all isinstance() checks ([#323](https://www.github.com/googleapis/python-ndb/issues/323)) ([133acf8](https://www.github.com/googleapis/python-ndb/commit/133acf87b2a2efbfeae23ac9f629132cfb368a55)) + +## [1.0.0](https://www.github.com/googleapis/python-ndb/compare/v0.2.2...v1.0.0) (2020-01-30) + + +### Bug Fixes + +* add user agent prefix google-cloud-ndb + version ([#299](https://www.github.com/googleapis/python-ndb/issues/299)) ([9fa136b](https://www.github.com/googleapis/python-ndb/commit/9fa136b9c163b24aefde6ccbc227a1035fa24bcd)) +* Finish implementation of UserProperty. ([#301](https://www.github.com/googleapis/python-ndb/issues/301)) ([fd2e0ed](https://www.github.com/googleapis/python-ndb/commit/fd2e0ed9bb6cec8b5651c58eaee2b3ca8a96aebb)), closes [#280](https://www.github.com/googleapis/python-ndb/issues/280) +* Fix bug when wrapping base values. ([#303](https://www.github.com/googleapis/python-ndb/issues/303)) ([91ca8d9](https://www.github.com/googleapis/python-ndb/commit/91ca8d9044671361b731323317cef720dd19be82)), closes [#300](https://www.github.com/googleapis/python-ndb/issues/300) +* Fix bug with the _GlobalCacheGetBatch. ([#305](https://www.github.com/googleapis/python-ndb/issues/305)) ([f213165](https://www.github.com/googleapis/python-ndb/commit/f2131654c6e5f67895fb0e3c09a507e8dc25c4bb)), closes [#294](https://www.github.com/googleapis/python-ndb/issues/294) +* Preserve `QueryIterator.cursor_after`. ([#296](https://www.github.com/googleapis/python-ndb/issues/296)) ([4ffedc7](https://www.github.com/googleapis/python-ndb/commit/4ffedc7b5a2366be15dcd299052d8a46a748addd)), closes [#292](https://www.github.com/googleapis/python-ndb/issues/292) + +## [0.2.2](https://www.github.com/googleapis/python-ndb/compare/v0.2.1...v0.2.2) (2020-01-15) + + +### Bug Fixes + +* Convert NDB keys to Datastore keys for serialization. ([#287](https://www.github.com/googleapis/python-ndb/issues/287)) ([779411b](https://www.github.com/googleapis/python-ndb/commit/779411b562575bd2d6f0627ce1903c2996f3c529)), closes [#284](https://www.github.com/googleapis/python-ndb/issues/284) +* fix missing __ne__ methods ([#279](https://www.github.com/googleapis/python-ndb/issues/279)) ([03dd5e1](https://www.github.com/googleapis/python-ndb/commit/03dd5e1c78b8e8354379d743e2f810ef1bece4d2)) +* Fix repr() for ComputedProperty ([#291](https://www.github.com/googleapis/python-ndb/issues/291)) ([2d8857b](https://www.github.com/googleapis/python-ndb/commit/2d8857b8e9a7119a47fd72ae76401af4e42bb5b5)), closes [#256](https://www.github.com/googleapis/python-ndb/issues/256) +* Handle `int` for DateTimeProperty ([#285](https://www.github.com/googleapis/python-ndb/issues/285)) ([2fe5be3](https://www.github.com/googleapis/python-ndb/commit/2fe5be31784a036062180f9c0f2c7b5eda978123)), closes [#261](https://www.github.com/googleapis/python-ndb/issues/261) +* More friendly error message when using `fetch_page` with post-filters. 
([#269](https://www.github.com/googleapis/python-ndb/issues/269)) ([a40ae74](https://www.github.com/googleapis/python-ndb/commit/a40ae74d74fa83119349de4b3a91f90df40d7ea5)), closes [#254](https://www.github.com/googleapis/python-ndb/issues/254) + +## [0.2.1](https://www.github.com/googleapis/python-ndb/compare/v0.2.0...v0.2.1) (2019-12-10) + + +### Bug Fixes + +* Correctly handle `limit` and `offset` when batching query results. ([#237](https://www.github.com/googleapis/python-ndb/issues/237)) ([8d3ce5c](https://www.github.com/googleapis/python-ndb/commit/8d3ce5c6cce9055d21400aa9feebc99e66393667)), closes [#236](https://www.github.com/googleapis/python-ndb/issues/236) +* Improve test cleanup. ([#234](https://www.github.com/googleapis/python-ndb/issues/234)) ([21f3d8b](https://www.github.com/googleapis/python-ndb/commit/21f3d8b12a3e2fefe488a951fb5186c7620cb864)) +* IntegerProperty now accepts `long` type for Python 2.7. ([#262](https://www.github.com/googleapis/python-ndb/issues/262)) ([9591e56](https://www.github.com/googleapis/python-ndb/commit/9591e569db32769c449d60dd3d9bdd6772dbc8f6)), closes [#250](https://www.github.com/googleapis/python-ndb/issues/250) +* Unstable order bug in unit test. ([#251](https://www.github.com/googleapis/python-ndb/issues/251)) ([7ff1df5](https://www.github.com/googleapis/python-ndb/commit/7ff1df51056f8498dc4320fc4b2684ead34a9116)), closes [#244](https://www.github.com/googleapis/python-ndb/issues/244) + +## 0.2.0 + +11-06-2019 10:39 PST + + +### Implementation Changes +- `query.map()` and `query.map_async()` hanging with empty result set. ([#230](https://github.com/googleapis/python-ndb/pull/230)) +- remove dunder version ([#202](https://github.com/googleapis/python-ndb/pull/202)) +- Check context ([#211](https://github.com/googleapis/python-ndb/pull/211)) +- Fix `Model._gql`. ([#223](https://github.com/googleapis/python-ndb/pull/223)) +- Update intersphinx mapping ([#206](https://github.com/googleapis/python-ndb/pull/206)) +- do not set meanings for compressed property when it has no value ([#200](https://github.com/googleapis/python-ndb/pull/200)) + +### New Features +- Python 2.7 compatibility ([#203](https://github.com/googleapis/python-ndb/pull/203)) +- Add `tzinfo` to DateTimeProperty. ([#226](https://github.com/googleapis/python-ndb/pull/226)) +- Implement `_prepare_for_put` for `StructuredProperty` and `LocalStructuredProperty`. ([#221](https://github.com/googleapis/python-ndb/pull/221)) +- Implement ``Query.map`` and ``Query.map_async``. ([#218](https://github.com/googleapis/python-ndb/pull/218)) +- Allow class member values in projection and distinct queries ([#214](https://github.com/googleapis/python-ndb/pull/214)) +- Implement ``Future.cancel()`` ([#204](https://github.com/googleapis/python-ndb/pull/204)) + +### Documentation +- Update README to include Python 2 support. ([#231](https://github.com/googleapis/python-ndb/pull/231)) +- Fix typo in MIGRATION_NOTES.md ([#208](https://github.com/googleapis/python-ndb/pull/208)) +- Spelling fixes. ([#209](https://github.com/googleapis/python-ndb/pull/209)) +- Add spell checking dependencies for documentation build. ([#196](https://github.com/googleapis/python-ndb/pull/196)) + +### Internal / Testing Changes +- Enable release-please ([#228](https://github.com/googleapis/python-ndb/pull/228)) +- Introduce local redis for tests ([#191](https://github.com/googleapis/python-ndb/pull/191)) +- Use .kokoro configs from templates. 
([#194](https://github.com/googleapis/python-ndb/pull/194)) + +## 0.1.0 + +09-10-2019 13:43 PDT + +### Deprecations +- Deprecate `max_memcache_items`, memcache options, `force_rewrites`, `Query.map()`, `Query.map_async()`, `blobstore`. ([#168](https://github.com/googleapis/python-ndb/pull/168)) + +### Implementation Changes +- Fix error retrieving values for properties with different stored name ([#187](https://github.com/googleapis/python-ndb/pull/187)) +- Use correct class when deserializing a PolyModel entity. ([#186](https://github.com/googleapis/python-ndb/pull/186)) +- Support legacy compressed properties back and forth ([#183](https://github.com/googleapis/python-ndb/pull/183)) +- Store Structured Properties in backwards compatible way ([#184](https://github.com/googleapis/python-ndb/pull/184)) +- Allow put and get to work with compressed blob properties ([#175](https://github.com/googleapis/python-ndb/pull/175)) +- Raise an exception when storing entity with partial key without Datastore. ([#171](https://github.com/googleapis/python-ndb/pull/171)) +- Normalize to prefer ``project`` over ``app``. ([#170](https://github.com/googleapis/python-ndb/pull/170)) +- Enforce naive datetimes for ``DateTimeProperty``. ([#167](https://github.com/googleapis/python-ndb/pull/167)) +- Handle projections with structured properties. ([#166](https://github.com/googleapis/python-ndb/pull/166)) +- Fix polymodel put and get ([#151](https://github.com/googleapis/python-ndb/pull/151)) +- `_prepare_for_put` was not being called at entity level ([#138](https://github.com/googleapis/python-ndb/pull/138)) +- Fix key property. ([#136](https://github.com/googleapis/python-ndb/pull/136)) +- Fix thread local context. ([#131](https://github.com/googleapis/python-ndb/pull/131)) +- Bugfix: Respect ``_indexed`` flag of properties. ([#127](https://github.com/googleapis/python-ndb/pull/127)) +- Backwards compatibility with older style structured properties. ([#126](https://github.com/googleapis/python-ndb/pull/126)) + +### New Features +- Read legacy data with Repeated Structured Expando properties. ([#176](https://github.com/googleapis/python-ndb/pull/176)) +- Implement ``Context.call_on_commit``. ([#159](https://github.com/googleapis/python-ndb/pull/159)) +- Implement ``Context.flush`` ([#158](https://github.com/googleapis/python-ndb/pull/158)) +- Implement ``use_datastore`` flag. ([#155](https://github.com/googleapis/python-ndb/pull/155)) +- Implement ``tasklets.toplevel``. ([#157](https://github.com/googleapis/python-ndb/pull/157)) +- Add RedisCache implementation of global cache ([#150](https://github.com/googleapis/python-ndb/pull/150)) +- Implement Global Cache ([#148](https://github.com/googleapis/python-ndb/pull/148)) +- ndb.Expando properties load and save ([#117](https://github.com/googleapis/python-ndb/pull/117)) +- Implement cache policy. ([#116](https://github.com/googleapis/python-ndb/pull/116)) + +### Documentation +- Fix Kokoro publish-docs job ([#153](https://github.com/googleapis/python-ndb/pull/153)) +- Update Migration Notes. ([#152](https://github.com/googleapis/python-ndb/pull/152)) +- Add `project_urls` for pypi page ([#144](https://github.com/googleapis/python-ndb/pull/144)) +- Fix `TRAMPOLINE_BUILD_FILE` in docs/common.cfg. ([#143](https://github.com/googleapis/python-ndb/pull/143)) +- Add kokoro docs job to publish to googleapis.dev. 
([#142](https://github.com/googleapis/python-ndb/pull/142)) +- Initial version of migration guide ([#121](https://github.com/googleapis/python-ndb/pull/121)) +- Add spellcheck sphinx extension to docs build process ([#123](https://github.com/googleapis/python-ndb/pull/123)) + +### Internal / Testing Changes +- Clean up usage of `object.__new__` and mocks for `Model` in unit tests ([#177](https://github.com/googleapis/python-ndb/pull/177)) +- Prove tasklets can be Python 2.7 and 3.7 compatible. ([#174](https://github.com/googleapis/python-ndb/pull/174)) +- Discard src directory and fix flake8 failures ([#173](https://github.com/googleapis/python-ndb/pull/173)) +- Add tests for `Model.__eq__()` ([#169](https://github.com/googleapis/python-ndb/pull/169)) +- Remove skip flag accidentally left over ([#154](https://github.com/googleapis/python-ndb/pull/154)) +- Try to get kokoro to add indexes for system tests ([#145](https://github.com/googleapis/python-ndb/pull/145)) +- Add system test for PolyModel ([#133](https://github.com/googleapis/python-ndb/pull/133)) +- Fix system test under Datastore Emulator. (Fixes [#118](https://github.com/googleapis/python-ndb/pull/118)) ([#119](https://github.com/googleapis/python-ndb/pull/119)) +- Add unit tests for `_entity_from_ds_entity` expando support ([#120](https://github.com/googleapis/python-ndb/pull/120)) + +## 0.0.1 + +06-11-2019 16:30 PDT + +### Implementation Changes +- Query repeated structured properties. ([#103](https://github.com/googleapis/python-ndb/pull/103)) +- Fix Structured Properties ([#102](https://github.com/googleapis/python-ndb/pull/102)) + +### New Features +- Implement expando model ([#99](https://github.com/googleapis/python-ndb/pull/99)) +- Model properties ([#96](https://github.com/googleapis/python-ndb/pull/96)) +- Implemented tasklets.synctasklet ([#58](https://github.com/googleapis/python-ndb/pull/58)) +- Implement LocalStructuredProperty ([#93](https://github.com/googleapis/python-ndb/pull/93)) +- Implement hooks. ([#95](https://github.com/googleapis/python-ndb/pull/95)) +- Three easy Model methods. ([#94](https://github.com/googleapis/python-ndb/pull/94)) +- Model.get or insert ([#92](https://github.com/googleapis/python-ndb/pull/92)) +- Implement ``Model.get_by_id`` and ``Model.get_by_id_async``. +- Implement ``Model.allocate_ids`` and ``Model.allocate_ids_async``. +- Implement ``Query.fetch_page`` and ``Query.fetch_page_async``. +- Implement ``Query.count`` and ``Query.count_async`` +- Implement ``Query.get`` and ``Query.get_async``. + +### Documentation +- update sphinx version and eliminate all warnings ([#105](https://github.com/googleapis/python-ndb/pull/105)) + +## 0.0.1dev1 + +Initial development release of NDB client library. diff --git a/packages/google-cloud-ndb/CODE_OF_CONDUCT.md b/packages/google-cloud-ndb/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..46b2a08ea6d1 --- /dev/null +++ b/packages/google-cloud-ndb/CODE_OF_CONDUCT.md @@ -0,0 +1,43 @@ +# Contributor Code of Conduct + +As contributors and maintainers of this project, +and in the interest of fostering an open and welcoming community, +we pledge to respect all people who contribute through reporting issues, +posting feature requests, updating documentation, +submitting pull requests or patches, and other activities. 
+ +We are committed to making participation in this project +a harassment-free experience for everyone, +regardless of level of experience, gender, gender identity and expression, +sexual orientation, disability, personal appearance, +body size, race, ethnicity, age, religion, or nationality. + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery +* Personal attacks +* Trolling or insulting/derogatory comments +* Public or private harassment +* Publishing other's private information, +such as physical or electronic +addresses, without explicit permission +* Other unethical or unprofessional conduct. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct. +By adopting this Code of Conduct, +project maintainers commit themselves to fairly and consistently +applying these principles to every aspect of managing this project. +Project maintainers who do not follow or enforce the Code of Conduct +may be permanently removed from the project team. + +This code of conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. + +Instances of abusive, harassing, or otherwise unacceptable behavior +may be reported by opening an issue +or contacting one or more of the project maintainers. + +This Code of Conduct is adapted from the [Contributor Covenant](http://contributor-covenant.org), version 1.2.0, +available at [http://contributor-covenant.org/version/1/2/0/](http://contributor-covenant.org/version/1/2/0/) diff --git a/packages/google-cloud-ndb/CONTRIBUTING.rst b/packages/google-cloud-ndb/CONTRIBUTING.rst new file mode 100644 index 000000000000..b78f2e1c2a17 --- /dev/null +++ b/packages/google-cloud-ndb/CONTRIBUTING.rst @@ -0,0 +1,314 @@ +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. ``python-ndb`` is undergoing heavy development right now, so if you plan to + implement a feature, please create an issue to discuss your idea first. That + way we can coordinate and avoid possibly duplicating ongoing work. +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on ``python-ndb``. + +*************** +Adding Features +*************** + +In order to add a feature to ``python-ndb``: + +- The feature must be documented in both the API and narrative + documentation (in ``docs/``). + +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10, 3.11, 3.12, 3.13 and 3.14 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment to hack on +``python-ndb``, using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``python-ndb`` `repo`_ on GitHub. 
+ +- Fork the ``python-ndb`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``python-ndb`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-python-ndb``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/python-ndb.git hack-on-python-ndb + $ cd hack-on-python-ndb + # Configure remotes such that you can pull changes from the python-ndb + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/python-ndb.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. _repo: https://github.com/googleapis/python-ndb + +Using ``nox`` +============= + +We use `nox`_ to instrument our tests. + +- To test your changes, run unit tests with ``nox``:: + + $ nox -s unit-3.10 + $ nox -s unit-3.7 + $ ... + +.. _nox: https://pypi.org/project/nox-automation/ + +- To run unit tests that use Memcached or Redis, you must have them running and set the appropriate environment variables:: + + $ export MEMCACHED_HOSTS=localhost:11211 + $ export REDIS_CACHE_URL=redis://localhost:6379 + + +Note on Editable Installs / Develop Mode +======================================== + +- As mentioned previously, using ``setuptools`` in `develop mode`_ + or a ``pip`` `editable install`_ is not possible with this + library. This is because this library uses `namespace packages`_. + For context see `Issue #2316`_ and the relevant `PyPA issue`_. + + Since ``editable`` / ``develop`` mode can't be used, packages + need to be installed directly. Hence your changes to the source + tree don't get incorporated into the **already installed** + package. + +.. _namespace packages: https://www.python.org/dev/peps/pep-0420/ +.. _Issue #2316: https://github.com/googleapis/google-cloud-python/issues/2316 +.. _PyPA issue: https://github.com/pypa/packaging-problems/issues/12 +.. _develop mode: https://setuptools.readthedocs.io/en/latest/setuptools.html#development-mode +.. _editable install: https://pip.pypa.io/en/stable/reference/pip_install/#editable-installs + +***************************************** +I'm getting weird errors... Can you help? +***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ + +- PEP8 compliance, with exceptions defined in the linter configuration. + If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="main" + + By doing this, you are specifying the location of the most up-to-date + version of ``python-ndb``. The suggested remote name ``upstream`` + should point to the official ``googleapis`` checkout and the + branch should be the main branch on that remote (``main``).
+ +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests for a given package, you can execute:: + + $ export SYSTEM_TESTS_DATABASE=system-tests-named-db + $ nox -e system + + .. note:: + + System tests are only configured to run under Python 3.14. For + expediency, we do not run them in older versions of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests may be run against the emulator. To do this, set the + ``DATASTORE_EMULATOR_HOST`` environment variable. Alternatively, + system tests with the emulator can run with + `nox -e emulator-system-PYTHON_VERSION` + +- System tests will be run against an actual project and + so you'll need to provide some environment variables to facilitate + authentication to your project: + + - ``GOOGLE_APPLICATION_CREDENTIALS``: The path to a JSON key file; + see ``system_tests/app_credentials.json.sample`` as an example. Such a file + can be downloaded directly from the developer's console by clicking + "Generate new JSON key". See private key + `docs `__ + for more details. + + - In order for Logging system tests to work, the Service Account + will also have to be made a project ``Owner``. This can be changed under + "IAM & Admin". Additionally, ``cloud-logs@google.com`` must be given + ``Editor`` permissions on the project. + +- For datastore tests, you'll need to create composite + `indexes `__ + with the ``gcloud`` command line + `tool `__:: + + # Install the app (App Engine Command Line Interface) component. + $ gcloud components install app-engine-python + + # Authenticate the gcloud tool with your account. + $ GOOGLE_APPLICATION_CREDENTIALS="path/to/app_credentials.json" + $ gcloud auth activate-service-account \ + > --key-file=${GOOGLE_APPLICATION_CREDENTIALS} + + # Create the indexes + $ gcloud datastore indexes create tests/system/index.yaml + $ gcloud alpha datastore indexes create --database=$SYSTEM_TESTS_DATABASE tests/system/index.yaml + + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. + +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. + +To build and review docs (where ``${VENV}`` refers to the virtualenv you're +using to develop ``python-ndb``): + +#. After following the steps above in "Using a Development Checkout", install + Sphinx and all development requirements in your virtualenv:: + + $ cd ${HOME}/hack-on-python-ndb + $ ${VENV}/bin/pip install Sphinx + +#. 
Change into the ``docs`` directory within your ``python-ndb`` checkout and + execute the ``make`` command with some flags:: + + $ cd ${HOME}/hack-on-python-ndb/docs + $ make clean html SPHINXBUILD=${VENV}/bin/sphinx-build + + The ``SPHINXBUILD=...`` argument tells Sphinx to use the virtualenv Python, + which will have both Sphinx and ``python-ndb`` (for API documentation + generation) installed. + +#. Open the ``docs/_build/html/index.html`` file to see the resulting HTML + rendering. + +As an alternative to 1. and 2. above, if you have ``nox`` installed, you +can build the docs via:: + + $ nox -s docs + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/python-ndb/blob/main/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-cloud/ + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.7`_ +- `Python 3.8`_ +- `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ +- `Python 3.12`_ +- `Python 3.13`_ +- `Python 3.14`_ + +.. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ +.. _Python 3.12: https://docs.python.org/3.12/ +.. _Python 3.13: https://docs.python.org/3.13/ +.. _Python 3.14: https://docs.python.org/3.14/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. _config: https://github.com/googleapis/python-ndb/blob/main/noxfile.py + + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. + +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA `__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA `__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. diff --git a/packages/google-cloud-ndb/LICENSE b/packages/google-cloud-ndb/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-cloud-ndb/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/packages/google-cloud-ndb/MIGRATION_NOTES.md b/packages/google-cloud-ndb/MIGRATION_NOTES.md new file mode 100644 index 000000000000..3022b42938a5 --- /dev/null +++ b/packages/google-cloud-ndb/MIGRATION_NOTES.md @@ -0,0 +1,375 @@ +# `ndb` Migration Notes + +This is a collection of assumptions, API / implementation differences +and comments about the `ndb` rewrite process. + +The primary differences come from: + +- Absence of "legacy" APIs provided by Google App Engine (e.g. + `google.appengine.api.datastore_types`) as well as other environment-specific + features (e.g. the `APPLICATION_ID` environment variable) +- Differences in Datastore APIs between the versions provided by Google App + Engine and Google Cloud Platform. +- Presence of new features in Python 3 like keyword-only arguments and + async support + +## Bootstrapping + +The biggest difference is in establishing a runtime context for your NDB +application. The Google App Engine Python 2.7 runtime had a strong assumption +that all code executed inside a web framework request-response cycle, in a +single thread per request. In order to decouple from that assumption, Cloud NDB +implements explicit clients and contexts. This is consistent with other Cloud +client libraries. + +The ``Client`` class has been introduced, which by and large works the same as +Datastore's ``Client`` class and uses ``google.auth`` for authentication. You +can pass a ``credentials`` parameter to ``Client`` or use the +``GOOGLE_APPLICATION_CREDENTIALS`` environment variable (recommended). See +https://cloud.google.com/docs/authentication/getting-started for details. + +Once a client has been obtained, you still need to establish a runtime context, +which you can do using the ``Client.context`` method. + +``` +from google.cloud import ndb + +# Assume GOOGLE_APPLICATION_CREDENTIALS is set in environment +client = ndb.Client() + +with client.context() as context: + do_stuff_with_ndb() +``` + +## Memcache + +Because the Google App Engine Memcache service is not a part of the Google +Cloud Platform, it was necessary to refactor the "memcache" functionality of +NDB. The concept of a memcache has been generalized to that of a "global cache" +and defined by the `GlobalCache` interface, which is an abstract base class. +NDB provides a single concrete implementation of `GlobalCache`, `RedisCache`, +which uses Redis. + +In order to enable the global cache, a `GlobalCache` instance must be passed +into the context. The Bootstrapping example can be amended as follows: + +``` +from google.cloud import ndb + +# Assume GOOGLE_APPLICATION_CREDENTIALS is set in environment. +client = ndb.Client() + +# Assume REDIS_CACHE_URL is set in environment (or not). +# If left unset, this will return `None`, which effectively allows you to turn +# global cache on or off using the environment. +global_cache = ndb.RedisCache.from_environment() + +with client.context(global_cache=global_cache) as context: + do_stuff_with_ndb() +``` + +`context.Context` had a number of methods that were direct pass-throughs to GAE +Memcache. These are no longer implemented. The methods of `context.Context` +that are affected are: `memcache_add`, `memcache_cas`, `memcache_decr`, +`memcache_delete`, `memcache_get`, `memcache_gets`, `memcache_incr`, +`memcache_replace`, `memcache_set`. + +## Differences (between old and new implementations) + +- The "standard" exceptions from App Engine are no longer available.
Instead, + we'll create "shims" for them in `google.cloud.ndb.exceptions` to match the + class names and emulate behavior. +- There is no replacement for `google.appengine.api.namespace_manager`, which is + used to determine the default namespace when not passed in to `Key()`. +- The `Key()` constructor (and helpers) makes a distinction between `unicode` + and `str` types (in Python 2). These are now `unicode->str` and `str->bytes`. + However, `google.cloud.datastore.Key()` (the actual type we use under the + covers) only allows the `str` type in Python 3, so much of the "type-check + and branch" from the original implementation is gone. This **may** cause + some slight differences. +- `Key.from_old_key()` and `Key.to_old_key()` always raise + `NotImplementedError`. Without the actual types from the legacy runtime, + these methods are impossible to implement. Also, since this code won't + run on legacy Google App Engine, these methods aren't needed. +- `Key.app()` may not preserve the prefix from the constructor (this is noted + in the docstring). +- `Key.__eq__` previously claimed to be "performance-conscious" and directly + used `self.__app == other.__app` and similar comparisons. We don't store the + same data on our `Key` (we just make a wrapper around + `google.cloud.datastore.Key`), so these are replaced by function calls like + `self.app() == other.app()`, which incur some overhead. +- The verification of kind / string ID fails when they exceed 1500 bytes. The + original implementation didn't allow in excess of 500 bytes, but it seems + the limit has been raised by the backend. (FWIW, Danny's opinion is that + the backend should enforce these limits, not the library.) +- `Property.__creation_counter_global` has been removed as it seems to have + been included for a feature that was never implemented. See + [Issue #175][1] for the original rationale for including it and [Issue #6317][2] + for discussion of its removal. +- `ndb` uses "private" instance attributes in many places, e.g. `Key.__app`. + The current implementation (for now) just uses "protected" attribute names, + e.g. `Key._key` (the implementation has changed in the rewrite). We may want + to keep the old "private" names around for compatibility. However, in some + cases, the underlying representation of the class has changed (such as `Key`) + due to newly available helper libraries or due to missing behavior from + the legacy runtime. +- `query.PostFilterNode.__eq__` compares `self.predicate` to `other.predicate` + rather than using `self.__dict__ == other.__dict__`. +- `__slots__` have been added to most non-exception types for a number of + reasons. The first is the naive "performance" win and the second is that + this will make it transparent whenever `ndb` users refer to non-existent + "private" or "protected" instance attributes. +- I dropped `Property._positional` since keyword-only arguments are native + Python 3 syntax and dropped `Property._attributes` in favor of an + approach using `inspect.signature()`. +- A bug in `Property._find_methods` was fixed where `reverse=True` was applied + **before** caching and then not respected when pulling from the cache. +- The `Property._find_methods_cache` has been changed. Previously it would be + set on each `Property` subclass and populated dynamically on first use. + Now `Property._FIND_METHODS_CACHE` is set to `{}` when the `Property` class + is created and there is another level of keys (based on fully-qualified + class name) in the cache.
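+
+  To make the two-level cache shape concrete, here is a minimal, hedged
+  sketch (an approximation for illustration only, not the actual
+  implementation; the method body is simplified):
+
+  ```
+  class Property:
+      # Shared across the whole class hierarchy; created with the class.
+      _FIND_METHODS_CACHE = {}
+
+      @classmethod
+      def _find_methods(cls, *names, reverse=False):
+          # First level of keys: the fully-qualified class name, so that
+          # subclasses do not collide with each other in the shared dict.
+          qualname = "{}.{}".format(cls.__module__, cls.__qualname__)
+          cache = cls._FIND_METHODS_CACHE.setdefault(qualname, {})
+          if names not in cache:
+              methods = []
+              for klass in cls.__mro__:
+                  for name in names:
+                      method = klass.__dict__.get(name)
+                      if method is not None:
+                          methods.append(method)
+              cache[names] = methods
+          # ``reverse`` is applied only after the cache lookup, so a
+          # reversed list is never cached (the bug described above).
+          methods = cache[names]
+          return list(reversed(methods)) if reverse else methods
+  ```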
+- `BlobProperty._datastore_type` has not been implemented; the base class + implementation is sufficient. The original implementation wrapped a byte + string in a `google.appengine.api.datastore_types.ByteString` instance, but + that type was mostly an alias for `str` in Python 2. +- `BlobProperty._validate` used to special-case for "too long when indexed" + if `isinstance(self, TextProperty)`. We have removed this check since + the implementation does the same check in `TextProperty._validate`. +- The `BlobProperty` constructor only sets `_compressed` if explicitly + passed. The original set `_compressed` always (and used `False` as default). + In the exact same fashion, the `JsonProperty` constructor only sets + `_json_type` if explicitly passed. Similarly, the `DateTimeProperty` + constructor only sets `_auto_now` and `_auto_now_add` if explicitly passed. +- `TextProperty(indexed=True)` and `StringProperty(indexed=False)` are no + longer supported (see docstrings for more info). +- `model.GeoPt` is an alias for `google.cloud.datastore.helpers.GeoPoint` + rather than an alias for `google.appengine.api.datastore_types.GeoPt`. These + classes have slightly different characteristics. +- The `Property()` constructor (and subclasses) originally accepted both + `unicode` and `str` (the Python 2 versions) for `name` (and `kind`) but we + only accept `str`. +- The `Parameter()` constructor (and subclasses) originally accepted `int`, + `unicode` and `str` (the Python 2 versions) for `key` but we only accept + `int` and `str`. +- When a `Key` is used to create a query "node", e.g. via + `MyModel.my_value == some_key`, the underlying behavior has changed. + Previously a `FilterNode` would be created with the actual value set to + `some_key.to_old_key()`. Now, we set it to `some_key._key`. +- The `google.appengine.api.users.User` class is missing, so there is a + replacement in `google.cloud.ndb.model.User` that is also available as + `google.cloud.ndb.User`. This does not support federated identity and + has new support for adding such a user to a `google.cloud.datastore.Entity` + and for reading one from a new-style `Entity`. +- The `UserProperty` class no longer supports `auto_current_user(_add)`. +- `Model.__repr__` will use `_key` to describe the entity's key when there + is also a user-defined property named `key`. For an example, see the + class docstring for `Model`. +- `Future.set_exception` no longer takes a `tb` argument. Python 3 does a good + job of remembering the original traceback for an exception and there is no + longer any value added by manually keeping track of the traceback ourselves. + This method shouldn't generally be called by user code, anyway. +- `Future.state` is omitted as it is redundant. Call `Future.done()` or + `Future.running()` to get the state of a future. +- `StringProperty` properties were previously stored as blobs + (entity_pb2.Value.blob_value) in Datastore. They are now properly stored as + strings (entity_pb2.Value.string_value). At read time, a `StringProperty` + will accept either a string or blob value, so compatibility is maintained + with legacy databases. +- The QueryOptions class from google.cloud.ndb.query has been reimplemented, + since google.appengine.datastore.datastore_rpc.Configuration is no longer + available. It still uses the same signature, but does not support original + Configuration methods. +- Because google.appengine.datastore.datastore_query.Order is no longer + available, the ndb.query.PropertyOrder class has been created to replace it.
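+
+  From user code, ordering looks the same as before; `PropertyOrder`
+  objects are normally created for you when you negate a property or call
+  `Query.order()`. A small hedged example (the model here is illustrative):
+
+  ```
+  from google.cloud import ndb
+
+  class Score(ndb.Model):
+      points = ndb.IntegerProperty()
+
+  # ``-Score.points`` produces a descending ordering under the hood.
+  query = Score.query().order(-Score.points)
+  ```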
+- Transaction propagation is no longer supported. This was a feature of the + older Datastore RPC library, which is no longer used. Starting a new + transaction when a transaction is already in progress in the current context + will result in an error, as will passing a value for the `propagation` option + when starting a transaction. +- The `xg` option for transactions is ignored. Previously, setting this to + `True` allowed writes to up to 5 entity groups in a transaction, as opposed to + only being able to write to a single entity group. In Datastore, currently, + writing to up to 25 entity groups in a transaction is supported by default and + there is no option to change this. +- The Datastore API does not support Entity Group metadata queries anymore, so + `google.cloud.ndb.metadata.EntityGroup` and + `google.cloud.ndb.metadata.get_entity_group_version` both throw a + `google.cloud.ndb.exceptions.NoLongerImplementedError` exception when used. +- The `batch_size` and `prefetch_size` arguments to `Query.fetch` and + `Query.fetch_async` are no longer supported. These were passed through + directly to Datastore, which no longer supports these options. +- The `index_list` method of `QueryIterator` is not implemented. Datastore no + longer returns this data with query results, so it is not available from the + API in this way. +- The `produce_cursors` query option is deprecated. Datastore always returns + cursors, where it can, and NDB always makes them available when possible. + This option can be passed in but it will be ignored. +- The `max` argument to `Model.allocate_ids` and `Model.allocate_ids_async` is + no longer supported. The Google Datastore API does not support setting a + maximum ID, a feature that GAE Datastore presumably had. +- `model.get_indexes()` and `model.get_indexes_async()` are no longer + implemented, as the support in Datastore for these functions disappeared + in the move from GAE to GCP. +- The `max_memcache_items` option is no longer supported. +- The `force_writes` option is no longer supported. +- The `blobstore` module is no longer supported. +- The `pass_batch_into_callback` argument to `Query.map` and `Query.map_async` + is no longer supported. +- The `merge_future` argument to `Query.map` and `Query.map_async` is no longer + supported. +- Key.urlsafe() output is subtly different: the original NDB included a GAE + Datastore-specific "location prefix", but that string is neither necessary + nor available on Cloud Datastore. For applications that require urlsafe() + strings to be exactly consistent between versions, use + Key.to_legacy_urlsafe(location_prefix) and pass in your location prefix as an + argument. Location prefixes are most commonly "s~" (or "e~" in Europe) but + the easiest way to find your prefix is to base64 decode any urlsafe key + produced by the original NDB and manually inspect it. The location prefix + will be consistent for an App Engine project and its corresponding Datastore + instance over its entire lifetime. +- Key.urlsafe outputs a "bytes" object on Python 3. This is consistent behavior + and actually just a change in nomenclature; in Python 2, the "str" type + referred to a bytestring, and in Python 3 the corresponding type is called + "bytes".
Users may notice difficulty incorporating urlsafe() strings into + JSON objects in Python 3; that is due to a change in the default + json.JSONEncoder behavior between Python 2 and Python 3, not a change in NDB + behavior (in Python 2, json.JSONEncoder accepted bytestrings and attempted to + convert them to unicode automatically, which can result in corrupted data, so + this is no longer done). + +## Privatization + +App Engine NDB exposed some internal utilities as part of the public API. A few +bits of the nominally public API have been found to be *de facto* private. +These are pieces that are omitted from public-facing documentation and which +have no apparent use outside of NDB internals. These pieces have been formally +renamed as part of the private API: + +- `eventloop` has been renamed to `_eventloop`. +- `tasklets.get_return_value` has been renamed to `tasklets._get_return_value` + and is no longer among top level exports. +- `tasklets.MultiFuture` has been renamed to `tasklets._MultiFuture`, removed + from top level exports, and has a much simpler interface. + +These options classes appear not to have been used directly by users and are +not implemented; the public-facing API used keyword arguments instead, which are +still supported: + +- `ContextOptions` +- `TransactionOptions` + +The following pieces appear to have been only used internally and are no longer +implemented due to the features they were used for having been refactored: + +- `Query.run_to_queue` +- `tasklets.add_flow_exception` +- `tasklets.make_context` +- `tasklets.make_default_context` +- `tasklets.QueueFuture` +- `tasklets.ReducingFuture` +- `tasklets.SerialQueueFuture` +- `tasklets.set_context` + +A number of functions in the `utils` package appear to have only been used +internally and have been made obsolete either by API changes, internal +refactoring, or new features of Python 3, and are no longer implemented: + +- `utils.code_info()` +- `utils.decorator()` +- `utils.frame_info()` +- `utils.func_info()` +- `utils.gen_info()` +- `utils.get_stack()` +- `utils.logging_debug()` +- `utils.positional()` +- `utils.tweak_logging()` +- `utils.wrapping()` (use `functools.wraps` instead) +- `utils.threading_local()` + +## Bare Metal + +One of the largest classes of differences comes from the use of the current +Datastore API, rather than the legacy App Engine Datastore. In general, for +users coding to the public interface, this won't be an issue, but users relying +on pieces of the ostensibly private API that are exposed to the bare metal of +the original datastore implementation will have to rewrite those pieces. +Specifically, any function or method that dealt directly with protocol buffers +will no longer work. The Datastore `.proto` definitions have changed +significantly from the barely public API used by App Engine to the current +published API. Additionally, this version of NDB mostly delegates to +`google.cloud.datastore` for parsing data returned by RPCs, which is a +significant internal refactoring. + +- `ModelAdapter` is no longer used. In legacy NDB, this was passed to the + Datastore RPC client so that calls to Datastore RPCs could yield NDB entities + directly from Datastore RPC calls. AFAIK, Datastore no longer accepts an + adapter for adapting entities. At any rate, we no longer do it that way. +- `Property._db_get_value` and `Property._db_set_value` are no longer used.
They + worked directly with Datastore protocol buffers, work which is now delegated + to `google.cloud.datastore`. +- `Property._db_set_compressed_meaning` and + `Property._db_set_uncompressed_meaning` were used by `Property._db_set_value` + and are no longer used. +- `Model._deserialize` and `Model._serialize` are no longer used. They worked + directly with protocol buffers, so weren't really salvageable. Unfortunately, + there were comments indicating they were overridden by subclasses. Hopefully + this isn't broadly the case. +- `model.make_connection` is no longer implemented. + +## Comments + +- There is rampant use (and abuse) of `__new__` rather than `__init__` as + a constructor in the original implementation. By using `__new__`, sometimes + a **different** type is returned from the constructor. It seems that feature, + along with the fact that `pickle` only calls `__new__` (and never `__init__`), + is why `__init__` is almost never used. +- The `Key.__getnewargs__()` method isn't needed. For pickle protocols 0 and 1, + `__new__` is not invoked on a class during unpickling; the state "unpacking" + is handled solely via `__setstate__`. However, for pickle protocols 2, 3 + and 4, during unpickling an instance will first be created via + `Key.__new__()` and then `__setstate__` would be called on that instance. + The addition of the `__getnewargs__` allows the (positional) arguments to be + stored in the pickled bytes. **All** of the work of the constructor happens + in `__new__`, so the call to `__setstate__` is redundant. In our + implementation `__setstate__` is sufficient, hence `__getnewargs__` isn't + needed. +- Key parts (i.e. kind, string ID and / or integer ID) are verified when a + `Reference` is created. However, this won't occur when the corresponding + protobuf for the underlying `google.cloud.datastore.Key` is created. This + is because the `Reference` is a legacy protobuf message type from App + Engine, while the latest (`google/datastore/v1`) RPC definition uses a `Key`. +- There is a `Property._CREATION_COUNTER` that gets incremented every time + a new `Property()` instance is created. This increment is not threadsafe. + However, `ndb` was designed for `Property()` instances to be created at + import time, so this may not be an issue. +- `ndb.model._BaseValue` for "wrapping" non-user values should probably + be dropped or redesigned if possible. +- Since we want "compatibility", suggestions in `TODO` comments have not been + implemented. However, that policy can be changed if desired. +- It seems that `query.ConjunctionNode.__new__` had an unreachable line + that returned a `FalseNode`. This return has been changed to a + `RuntimeError` just in case it **is** actually reached. +- For ``AND`` and ``OR`` to compare equal, the nodes must come in the + same order. So ``AND(a > 7, b > 6)`` is not equal to ``AND(b > 6, a > 7)``. +- The whole `bytes` vs. `str` issue needs to be considered package-wide. + For example, the `Property()` constructor always encoded Python 2 `unicode` + to a Python 2 `str` (i.e. `bytes`) with the `utf-8` encoding. This fits + in some sense: the property name in the [protobuf definition][3] is a + `string` (i.e.
UTF-8 encoded text). However, there is a bit of a disconnect + with other types that use property names, e.g. `FilterNode`. +- There is a giant web of module interdependency, so runtime imports (to avoid + import cycles) are very common. For example, `model.Property` depends on + `query`, but `query` depends on `model`. +- Will need to sort out dependencies on old RPC implementations and port to + modern gRPC. ([Issue #6363][4]) + +[1]: https://github.com/GoogleCloudPlatform/datastore-ndb-python/issues/175 +[2]: https://github.com/googleapis/google-cloud-python/issues/6317 +[3]: https://github.com/googleapis/googleapis/blob/3afba2fd062df0c89ecd62d97f912192b8e0e0ae/google/datastore/v1/entity.proto#L203 +[4]: https://github.com/googleapis/google-cloud-python/issues/6363 diff --git a/packages/google-cloud-ndb/README.md b/packages/google-cloud-ndb/README.md new file mode 100644 index 000000000000..af41ed1e84bf --- /dev/null +++ b/packages/google-cloud-ndb/README.md @@ -0,0 +1,45 @@ +# Google Cloud Datastore `ndb` Client Library + +[![stable](https://img.shields.io/badge/support-stable-gold.svg)](https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels) +[![pypi](https://img.shields.io/pypi/v/google-cloud-ndb.svg)](https://pypi.org/project/google-cloud-ndb/) +[![versions](https://img.shields.io/pypi/pyversions/google-cloud-ndb.svg)](https://pypi.org/project/google-cloud-ndb/) + +## Introduction + +This is an updated version of the `ndb` client library for use with +[Google Cloud Datastore][0]. + +* [Client Library Documentation](https://googleapis.dev/python/python-ndb/latest) +* [Product Documentation](https://cloud.google.com/datastore/docs) + +The original Python 2 version of `ndb` was designed specifically for the +[Google App Engine][1] `python27` runtime and can be found at +https://github.com/GoogleCloudPlatform/datastore-ndb-python. This version of +`ndb` is designed for the [Google App Engine Python 3 runtime][2], and will +run on other Python platforms as well. + +[0]: https://cloud.google.com/datastore +[1]: https://cloud.google.com/appengine +[2]: https://cloud.google.com/appengine/docs/standard/python3/ + +## Release Status + +GA + +## Supported Python Versions + +Our client libraries are compatible with all current [active][3] and [maintenance][4] versions of Python. + +Python >= 3.7 + +[3]: https://devguide.python.org/devcycle/#in-development-main-branch +[4]: https://devguide.python.org/devcycle/#maintenance-branches + +### Unsupported Python Versions + +Python <= 3.6 + +If you are using an [end-of-life][5] +version of Python, we recommend that you update as soon as possible to an actively supported version. + +[5]: https://devguide.python.org/devcycle/#end-of-life-branches diff --git a/packages/google-cloud-ndb/SECURITY.md b/packages/google-cloud-ndb/SECURITY.md new file mode 100644 index 000000000000..8b58ae9c01ae --- /dev/null +++ b/packages/google-cloud-ndb/SECURITY.md @@ -0,0 +1,7 @@ +# Security Policy + +To report a security issue, please use [g.co/vulnz](https://g.co/vulnz). + +The Google Security Team will respond within 5 working days of your report on g.co/vulnz. + +We use g.co/vulnz for intake, and we coordinate disclosure here using a GitHub Security Advisory to privately discuss and fix the issue.
diff --git a/packages/google-cloud-ndb/docs/Makefile b/packages/google-cloud-ndb/docs/Makefile new file mode 100644 index 000000000000..298ea9e213e8 --- /dev/null +++ b/packages/google-cloud-ndb/docs/Makefile @@ -0,0 +1,19 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +SOURCEDIR = . +BUILDDIR = _build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) \ No newline at end of file diff --git a/packages/google-cloud-ndb/docs/_static/images/favicon.ico b/packages/google-cloud-ndb/docs/_static/images/favicon.ico new file mode 100644 index 000000000000..23c553a2966c Binary files /dev/null and b/packages/google-cloud-ndb/docs/_static/images/favicon.ico differ diff --git a/packages/google-cloud-ndb/docs/blobstore.rst b/packages/google-cloud-ndb/docs/blobstore.rst new file mode 100644 index 000000000000..3a2cb861ed0d --- /dev/null +++ b/packages/google-cloud-ndb/docs/blobstore.rst @@ -0,0 +1,10 @@ +######### +Blobstore +######### + +.. automodule:: google.cloud.ndb.blobstore + :members: + :inherited-members: + :undoc-members: + :show-inheritance: + :exclude-members: BlobKey diff --git a/packages/google-cloud-ndb/docs/client.rst b/packages/google-cloud-ndb/docs/client.rst new file mode 100644 index 000000000000..fced930b2cdc --- /dev/null +++ b/packages/google-cloud-ndb/docs/client.rst @@ -0,0 +1,7 @@ +###### +Client +###### + +.. automodule:: google.cloud.ndb.client + :members: + :show-inheritance: diff --git a/packages/google-cloud-ndb/docs/cloud-core_objects.inv b/packages/google-cloud-ndb/docs/cloud-core_objects.inv new file mode 100644 index 000000000000..55ca400d2415 Binary files /dev/null and b/packages/google-cloud-ndb/docs/cloud-core_objects.inv differ diff --git a/packages/google-cloud-ndb/docs/conf.py b/packages/google-cloud-ndb/docs/conf.py new file mode 100644 index 000000000000..8e26d673e214 --- /dev/null +++ b/packages/google-cloud-ndb/docs/conf.py @@ -0,0 +1,239 @@ +# -*- coding: utf-8 -*- +# +# Configuration file for the Sphinx documentation builder. +# +# This file does only contain a selection of the most common options. For a +# full list see the documentation: +# http://www.sphinx-doc.org/en/master/config + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +# +# import os +# import sys +# sys.path.insert(0, os.path.abspath('.')) + +import google.cloud.ndb # ``ndb`` must be installed to build the docs. + +# -- Project information ----------------------------------------------------- + +project = "ndb" +copyright = "2018, Google" +author = "Google APIs" + + +# -- General configuration --------------------------------------------------- + +# If your documentation needs a minimal Sphinx version, state it here. 
+# +# needs_sphinx = '1.0' +nitpicky = True +nitpick_ignore = [ + ("py:obj", "google.cloud.datastore._app_engine_key_pb2.Reference"), + ("py:class", "google.cloud.datastore._app_engine_key_pb2.Reference"), + ("py:class", "google.cloud.datastore_v1.proto.entity_pb2.Entity"), + ("py:class", "_datastore_query.Cursor"), + ("py:meth", "_datastore_query.Cursor.urlsafe"), + ("py:class", "google.cloud.ndb.context._Context"), + ("py:class", "google.cloud.ndb.metadata._BaseMetadata"), + ("py:class", "google.cloud.ndb.model._NotEqualMixin"), + ("py:class", "google.cloud.ndb._options.ReadOptions"), + ("py:class", "QueryIterator"), + ("py:class", ".."), + ("py:class", "Any"), + ("py:class", "Callable"), + ("py:class", "Dict"), + ("py:class", "Iterable"), + ("py:class", "List"), + ("py:class", "Optional"), + ("py:class", "Tuple"), + ("py:class", "Union"), + ("py:class", "redis.Redis"), + ("py:class", "pymemcache.Client"), +] + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.doctest", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# +# source_suffix = ['.rst', '.md'] +source_suffix = ".rst" + +# The master toctree document. +master_doc = "index" + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = None + + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +# +# html_theme_options = {} +html_favicon = "_static/images/favicon.ico" + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Custom sidebar templates, must be a dictionary that maps document names +# to template names. +# +# The default sidebars (for documents that don't match any pattern) are +# defined by theme itself. Builtin themes are using these templates by +# default: ``['localtoc.html', 'relations.html', 'sourcelink.html', +# 'searchbox.html']``. 
+# +# html_sidebars = {} + + +# -- Options for HTMLHelp output --------------------------------------------- + +# Output file base name for HTML help builder. +htmlhelp_basename = "ndbdoc" + + +# -- Options for LaTeX output ------------------------------------------------ + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # + # 'preamble': '', + # Latex figure (float) alignment + # + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [(master_doc, "ndb.tex", "ndb Documentation", "Google LLC", "manual")] + + +# -- Options for manual page output ------------------------------------------ + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [(master_doc, "ndb", "ndb Documentation", [author], 1)] + + +# -- Options for Texinfo output ---------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + master_doc, + "ndb", + "ndb Documentation", + author, + "ndb", + "One line description of project.", + "Miscellaneous", + ) +] + + +# -- Options for Epub output ------------------------------------------------- + +# Bibliographic Dublin Core info. +epub_title = project + +# The unique identifier of the text. This can be a ISBN number +# or the project homepage. +# +# epub_identifier = '' + +# A unique identification for the text. +# +# epub_uid = '' + +# A list of files that should not be packed into the epub file. +epub_exclude_files = ["search.html"] + + +# -- Extension configuration ------------------------------------------------- + +# -- Options for intersphinx extension --------------------------------------- + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://docs.python.org/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/latest/", None), + "google-cloud-datastore": ( + "https://cloud.google.com/python/docs/reference/datastore/latest/", + (None, "datastore_objects.inv"), + ), + "google-api-core": ( + "https://googleapis.dev/python/google-api-core/latest", + None, + ), + "google-cloud-core": ( + "https://cloud.google.com/python/docs/reference/google-cloud-core/latest/", + (None, "cloud-core_objects.inv"), + ), + "grpc": ("https://grpc.io/grpc/python/", None), +} + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-cloud-ndb/docs/context.rst b/packages/google-cloud-ndb/docs/context.rst new file mode 100644 index 000000000000..22135972d61e --- /dev/null +++ b/packages/google-cloud-ndb/docs/context.rst @@ -0,0 +1,7 @@ +####### +Context +####### + +.. 
automodule:: google.cloud.ndb.context + :members: + :show-inheritance: diff --git a/packages/google-cloud-ndb/docs/datastore_objects.inv b/packages/google-cloud-ndb/docs/datastore_objects.inv new file mode 100644 index 000000000000..a8b89d66a64c Binary files /dev/null and b/packages/google-cloud-ndb/docs/datastore_objects.inv differ diff --git a/packages/google-cloud-ndb/docs/django-middleware.rst b/packages/google-cloud-ndb/docs/django-middleware.rst new file mode 100644 index 000000000000..19f83cb914d8 --- /dev/null +++ b/packages/google-cloud-ndb/docs/django-middleware.rst @@ -0,0 +1,9 @@ +################# +Django Middleware +################# + +.. automodule:: google.cloud.ndb.django_middleware + :members: + :inherited-members: + :undoc-members: + :show-inheritance: diff --git a/packages/google-cloud-ndb/docs/exceptions.rst b/packages/google-cloud-ndb/docs/exceptions.rst new file mode 100644 index 000000000000..7c5743e8daa5 --- /dev/null +++ b/packages/google-cloud-ndb/docs/exceptions.rst @@ -0,0 +1,8 @@ +########## +Exceptions +########## + +.. automodule:: google.cloud.ndb.exceptions + :members: + :undoc-members: + :show-inheritance: diff --git a/packages/google-cloud-ndb/docs/global_cache.rst b/packages/google-cloud-ndb/docs/global_cache.rst new file mode 100644 index 000000000000..69a3ffcb9e42 --- /dev/null +++ b/packages/google-cloud-ndb/docs/global_cache.rst @@ -0,0 +1,7 @@ +############ +Global Cache +############ + +.. automodule:: google.cloud.ndb.global_cache + :members: + :show-inheritance: diff --git a/packages/google-cloud-ndb/docs/index.rst b/packages/google-cloud-ndb/docs/index.rst new file mode 100644 index 000000000000..1e876df00ab7 --- /dev/null +++ b/packages/google-cloud-ndb/docs/index.rst @@ -0,0 +1,268 @@ +########################################## +``ndb`` library for Google Cloud Datastore +########################################## + +.. toctree:: + :hidden: + :maxdepth: 2 + + client + context + global_cache + key + model + query + tasklets + exceptions + polymodel + django-middleware + msgprop + blobstore + metadata + stats + migrating + +This is a Python 3 version of the `ndb` client library for use with +`Google Cloud Datastore <https://cloud.google.com/datastore>`_. + +The `original Python 2 version +<https://github.com/GoogleCloudPlatform/datastore-ndb-python>`_ was designed +specifically for the Google App Engine `python27` runtime. This version of +`ndb` is designed for the `Google App Engine Python 3 runtime +<https://cloud.google.com/appengine/docs/standard/python3/>`_ and will run on +other Python 3 platforms as well. + +Installing ``ndb`` +================== + +``ndb`` can be installed using pip:: + + $ pip install google-cloud-ndb + +Before you can use ``ndb``, you need a way to authenticate with Google. The +recommended way to do this is to create a `service account +<https://cloud.google.com/iam/docs/service-accounts>`_ that is +associated with the Google Cloud project that you'll be working on. Detailed +instructions are at the link above; essentially, once you create the account +you will be able to download a JSON file with your credentials, which you can +store locally. + +Once you have the credentials, the best way to let your application know about +them is to set an environment variable with the path to the JSON file.
On +Linux:: + + export GOOGLE_APPLICATION_CREDENTIALS="/path/to/credentials.json" + +From the Windows command prompt:: + + set GOOGLE_APPLICATION_CREDENTIALS=C:\path\to\credentials.json + +To test that your credentials work, try this from the Python environment where +you installed ``ndb``:: + + >>> from google.cloud import ndb + >>> client = ndb.Client() + >>> client + <google.cloud.ndb.client.Client object at 0x...> + +If your credentials are OK, you will have an active client. Otherwise, Python +will raise a `google.auth.exceptions.DefaultCredentialsError` exception. + +Next, you'll need to enable the Datastore and Firestore APIs for your project. To do +that, select "APIs & Services" from the Google Cloud Platform menu, then "Enable +APIs and Services". From there, look for "Databases" in the Category filter. +Make sure that both "Cloud Datastore API" and "Google Cloud Firestore API" are +enabled. + +Accessing a specific project, database, or namespace +==================================================== + +A client can be bound to a chosen Google Cloud project, database, and/or namespace +by passing one or more of these options to the client constructor:: + + client = ndb.Client( + project="your-project-id", + database="your-database-id", + namespace="your-namespace" + ) + +Defining Entities, Keys, and Properties +======================================= + +Now that we have completed setup, we can start writing applications. Let's +begin by introducing some of ``ndb``'s most important concepts. + +Cloud Datastore stores data objects, called entities. An entity has one or more +properties, named values of one of several supported data types. For example, a +property can be a string, an integer, or a reference to another entity. + +Each entity is identified by a key, an identifier unique within the +application's datastore. The key can have a parent, another key. This parent +can itself have a parent, and so on; at the top of this "chain" of parents is a +key with no parent, called the root. + +Entities whose keys have the same root form an entity group or group. If +entities are in different groups, then changes to those entities might +sometimes seem to occur "out of order". If the entities are unrelated in your +application's semantics, that's fine. But if some entities' changes should be +consistent, your application should make them part of the same group when +creating them. + +In practice, this would look like the following. Assume we want to keep track +of personal contacts. Our entities might look like this:: + + from google.cloud import ndb + + class Contact(ndb.Model): + name = ndb.StringProperty() + phone = ndb.StringProperty() + email = ndb.StringProperty() + +For now, we'll keep it simple. For each contact, we'll have a name, a phone +number, and an email. This is defined in the above code. Notice that our +`Contact` class inherits from `google.cloud.ndb.Model`. A model is a class +that describes a type of entity, including the types and configuration for its +properties. It's roughly analogous to a SQL table. An entity can be created by +calling the model's class constructor and then stored by calling the put() +method.
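+
+Because the model describes the types of its properties, values are validated
+at assignment time, before anything is written to Datastore; an invalid value
+raises a ``BadValueError``. A minimal sketch (the exact error message may vary
+between versions)::
+
+    contact = Contact(name="John Smith")
+    contact.phone = 5551234     # raises BadValueError: not a string
+    contact.phone = "555 1234"  # OK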
+ +Now that we have our model, let's create a couple of entities:: + + client = ndb.Client() + with client.context(): + contact1 = Contact(name="John Smith", + phone="555 617 8993", + email="john.smith@gmail.com") + contact1.put() + contact2 = Contact(name="Jane Doe", + phone="555 445 1937", + email="jane.doe@gmail.com") + contact2.put() + +An important thing to note here is that to perform any work in the underlying +Cloud Datastore, a client context has to be active. After the ``ndb`` client is +initialized, we obtain a context using the +`google.cloud.ndb.Client.context` method. Then, we "activate" the context by +using Python's context manager mechanisms. Now, we can safely create the +entities, which are in turn stored using the put() method. + +.. note:: + + For all the following examples, please assume that the context + activation code precedes any ``ndb`` interactions. + +In this example, since we didn't specify a parent, both entities are going to +be part of the *root* entity group. Let's say we want to have separate contact +groups, like "home" or "work". In this case, we can specify a parent, in the +form of an ancestor key, using ``ndb``'s `google.cloud.ndb.Key` class:: + + ancestor_key = ndb.Key("ContactGroup", "work") + contact1 = Contact(parent=ancestor_key, + name="John Smith", + phone="555 617 8993", + email="john.smith@gmail.com") + contact1.put() + contact2 = Contact(parent=ancestor_key, + name="Jane Doe", + phone="555 445 1937", + email="jane.doe@gmail.com") + contact2.put() + +A `key` is composed of a pair of ``(kind, id)`` values. The kind identifies the +type of entity that this key refers to (usually the name of a model class), and +the id is the string name or integer id that we want to associate with this +key. Note that it's not mandatory to have the kind class +defined previously in the code for this to work. + +This covers the basics of storing content in Cloud Datastore. If you go to +the Administration Console for your project, you should see the entities that +were just created. Select "Datastore" from the Storage section of the Google +Cloud Platform menu, then "Entities", to get to the entity search page. + +Queries and Indexes +=================== + +Now that we have some entities safely stored, let's see how to get them out. An +application can query to find entities that match some filters:: + + query = Contact.query() + names = [c.name for c in query] + +A typical ``ndb`` query filters entities by kind. In this example, we use a +shortcut from the Model class that generates a query that returns all Contact +entities. A query can also specify filters on entity property values and keys. + +A query can specify sort order. If a given entity has at least one (possibly +null) value for every property in the filters and sort orders and all the +filter criteria are met by the property values, then that entity is returned as +a result. + +In the previous section, we stored some contacts using an ancestor key. Using +that key, we can find only entities that "belong to" some ancestor:: + + ancestor_key = ndb.Key("ContactGroup", "work") + query = Contact.query(ancestor=ancestor_key) + names = [c.name for c in query] + +While the first query example returns all four stored contacts, this last one +only returns those stored under the "work" contact group. + +There are many useful operations that can be done on a query.
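+
+One useful operation is fetching results explicitly, with an optional limit,
+instead of iterating over the whole query; a minimal sketch (reusing the
+``Contact`` model from above, with an arbitrary limit of 10)::
+
+    query = Contact.query()
+    contacts = query.fetch(10)  # a list of at most 10 Contact entities
+
+Two of the most common operations, though, are sorting and filtering.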
For example, to +get results ordered by name:: + + query = Contact.query().order(Contact.name) + names = [c.name for c in query] + +You can also filter the results:: + + query = Contact.query().filter(Contact.name == "John Smith") + names = [c.name for c in query] + +Every query uses an index, a table that contains the results for the query in +the desired order. The underlying Datastore automatically maintains simple +indexes (indexes that use only one property). + +You can define complex indexes in a configuration file, `index.yaml +<https://cloud.google.com/datastore/docs/tools/indexconfig>`_. When starting +out with complex indexes, the easiest way to define them is by attempting a +complex query from your application or from the command line. When Datastore +encounters queries that do not yet have indexes configured, it will generate an +error stating that no matching index was found, and it will include the +recommended (and correct) index syntax as part of the error message. + +For example, the following Contact query will generate an error, since we are +using more than one property:: + + query = Contact.query().order(Contact.name, Contact.email) + names = [c.name for c in query] + +This will show an error like the following. Look for the text "recommended +index is" to find the index properties that you need:: + + debug_error_string = "{"created":"@1560413351.069418472", + "description":"Error received from peer ipv6:[2607:f8b0:4012 + :809::200a]:443","file": "src/core/lib/surface/call.cc", + "file_line":1046,"grpc_message":"no matching index found. + recommended index is:\n- kind: Contact\n properties:\n - name: + name\n - name: email\n","grpc_status":9}" + +From this error, you would get the following index description:: + + - kind: Contact + properties: + - name: name + - name: email + +Add your new indexes to a local `index.yaml` file. When you have them all, you +can add them to your project using the `gcloud` command from the `Google Cloud +SDK <https://cloud.google.com/sdk>`_:: + + gcloud datastore indexes create path/to/index.yaml + +If your datastore has many entities, it takes a long time to create a new index +for them; in this case, it's wise to update the index definitions before +uploading code that uses the new index. You can use the "Datastore" control +panel to find out when the indexes have finished building. + +This index mechanism supports a wide range of queries and is suitable for most +applications. However, it does not support some kinds of queries common in +other database technologies. In particular, joins aren't supported. diff --git a/packages/google-cloud-ndb/docs/key.rst b/packages/google-cloud-ndb/docs/key.rst new file mode 100644 index 000000000000..3b3addcd61c1 --- /dev/null +++ b/packages/google-cloud-ndb/docs/key.rst @@ -0,0 +1,9 @@ +### +Key +### + +.. automodule:: google.cloud.ndb.key + :members: + :inherited-members: + :undoc-members: + :show-inheritance: diff --git a/packages/google-cloud-ndb/docs/metadata.rst b/packages/google-cloud-ndb/docs/metadata.rst new file mode 100644 index 000000000000..3e5980092b46 --- /dev/null +++ b/packages/google-cloud-ndb/docs/metadata.rst @@ -0,0 +1,8 @@ +################## +Datastore Metadata +################## + +..
automodule:: google.cloud.ndb.metadata + :members: + :inherited-members: + :show-inheritance: diff --git a/packages/google-cloud-ndb/docs/migrating.rst b/packages/google-cloud-ndb/docs/migrating.rst new file mode 100644 index 000000000000..b71d888696d7 --- /dev/null +++ b/packages/google-cloud-ndb/docs/migrating.rst @@ -0,0 +1,278 @@ +###################################### +Migrating from Python 2 version of NDB +###################################### + +While every attempt has been made to keep compatibility with the previous +version of `ndb`, there are fundamental differences at the platform level, +which have made it necessary in some cases to depart from the original +implementation, and sometimes even to remove existing functionality +altogether. + +One of the main objectives of this rewrite was to enable `ndb` for use in any +Python environment, not just Google App Engine. As a result, many of the `ndb` +APIs that relied on GAE environment and runtime variables, resources, and +legacy APIs have been dropped. + +Aside from this, there are many differences between the Datastore APIs +provided by GAE and those provided by the newer Google Cloud Platform. These +differences have required some code and API changes as well. + +Finally, in many cases, new features of Python 3 have eliminated the need for +some code, particularly from the old `utils` module. + +If you are migrating code, these changes can generate some confusion. This +document will cover the most common migration issues. + +Setting up a connection +======================= + +The most important difference from the previous `ndb` version is that the new +`ndb` requires the use of a client to set up a runtime context for a project. +This is necessary because `ndb` can now be used in any Python environment, so +we can no longer assume it's running in the context of a GAE request. + +The `ndb` client uses ``google.auth`` for authentication, consistent with other +Google Cloud Platform client libraries. The client can take a `credentials` +parameter or get the credentials using the `GOOGLE_APPLICATION_CREDENTIALS` +environment variable, which is the recommended option. For more information +about authentication, consult the `Google Cloud authentication +<https://cloud.google.com/docs/authentication>`_ documentation. + +After instantiating a client, it's necessary to establish a runtime context, +using the ``Client.context`` method. All interactions with the database must +be within the context obtained from this call:: + + from google.cloud import ndb + + client = ndb.Client() + + with client.context() as context: + do_something_with_ndb() + +The context is not thread-safe, so for threaded applications, you need to +generate one context per thread. This is particularly important for web +applications, where the best practice would be to generate a context per +request. However, please note that for cases where multiple threads are used +for a single request, a new context should be generated for every thread that +will use the `ndb` library.
+ +The following code shows how to use the context in a threaded application:: + + import threading + from google.cloud import ndb + + client = ndb.Client() + + class Test(ndb.Model): + name = ndb.StringProperty() + + def insert(input_name): + with client.context(): + t = Test(name=input_name) + t.put() + + thread1 = threading.Thread(target=insert, args=['John']) + thread2 = threading.Thread(target=insert, args=['Bob']) + + thread1.start() + thread2.start() + +Note that the examples above assume that Google application credentials are set +in the environment. + +Keys +==== + +There are some methods from the ``key`` module that are not implemented in +this version of `ndb`: + + - Key.from_old_key. + - Key.to_old_key. + +These methods were used to pass keys to and from the `db` Datastore API, which +is no longer supported (`db` was `ndb`'s predecessor). + +Models +====== + +There are some methods from the ``model`` module that are not implemented in +this version of `ndb`. This is because getting the indexes relied on GAE +context functionality: + + - get_indexes. + - get_indexes_async. + +Properties +========== + +There are various small changes in some of the model properties that might +trip you up when migrating code. Here are some of them, for quick reference: + +- The `BlobProperty` constructor only sets `_compressed` if explicitly + passed. The original always set `_compressed`. +- In exactly the same fashion, the `JsonProperty` constructor only sets + `_json_type` if explicitly passed. +- Similarly, the `DateTimeProperty` constructor only sets `_auto_now` and + `_auto_now_add` if explicitly passed. +- `TextProperty(indexed=True)` and `StringProperty(indexed=False)` are no + longer supported. That is, TextProperty can no longer be indexed, whereas + StringProperty is always indexed. +- The `Property()` constructor (and subclasses) originally accepted both + `unicode` and `str` (the Python 2 versions) for `name` (and `kind`) but now + only accept `str`. + +QueryOptions and Query Order +============================ + +The QueryOptions class from ``google.cloud.ndb.query`` has been reimplemented, +since ``google.appengine.datastore.datastore_rpc.Configuration`` is no longer +available. It still uses the same signature, but does not support the original +Configuration methods. + +Similarly, because ``google.appengine.datastore.datastore_query.Order`` is no +longer available, the ``ndb.query.PropertyOrder`` class has been created to +replace it. + +MessageProperty and EnumProperty +================================ + +These properties, from the ``ndb.msgprop`` module, depend on the Google +Protocol RPC Library, or `protorpc`, which is not an `ndb` dependency. For +this reason, they are not part of this version of `ndb`. + +Tasklets +======== + +When writing a `tasklet`, it is no longer necessary to raise a Return +exception to return the result. A normal return can be used instead:: + + @ndb.tasklet + def get_cart(): + cart = yield CartItem.query().fetch_async() + return cart + +Note that "raise Return(cart)" can still be used, but it's not recommended. +(A short sketch of calling a tasklet appears after the list below.) + +There are some methods from the ``tasklet`` module that are not implemented in +this version of `ndb`, mainly because of changes in how an `ndb` context is +created and used in this version: + + - add_flow_exception. + - make_context. + - make_default_context. + - QueueFuture. + - ReducingFuture. + - SerialQueueFuture. + - set_context.
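+
+To round out the tasklet example above: calling a tasklet returns a future,
+and the blocking result can be retrieved with ``get_result()``. A minimal
+sketch, assuming an active client context as in the earlier examples::
+
+    with client.context():
+        future = get_cart()          # calling a tasklet returns a Future
+        cart = future.get_result()   # blocks until the tasklet completes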
+ +ndb.utils +========= + +The previous version of `ndb` included an ``ndb.utils`` module, which defined +a number of methods that were mostly used internally. Some of those have been +made obsolete by new Python 3 features, while others have been discarded due +to implementation differences in the new `ndb`. + +Possibly the most used utility from this module outside of `ndb` code is the +``positional`` decorator, which declares that only the first `n` arguments of +a function or method may be positional. Python 3 can do this using keyword-only +arguments. What used to be written as:: + + @utils.positional(2) + def function1(arg1, arg2, arg3=None, arg4=None): + pass + +Should be written like this in Python 3:: + + def function1(arg1, arg2, *, arg3=None, arg4=None): + pass + +However, ``positional`` remains available and works in Python 3. + +Exceptions +========== + +App Engine's legacy exceptions are no longer available, but `ndb` provides +shims for most of them, which can be imported from the `ndb.exceptions` +package, like this:: + + from google.cloud.ndb.exceptions import BadRequestError, BadArgumentError + +Datastore API +============= + +There are many differences between the current Datastore API and the legacy App +Engine Datastore. In most cases, where only the public API was used, this +should not be a problem. However, if your code relied on the private +Datastore API, it will probably need to be rewritten. + +Specifically, the old NDB library included some undocumented APIs that dealt +directly with Datastore protocol buffers. These APIs will no longer work. +Rewrite any code that used the following classes, properties, or methods: + + - ModelAdapter + - Property._db_get_value, Property._db_set_value. + - Property._db_set_compressed_meaning and + Property._db_set_uncompressed_meaning. + - Model._deserialize and Model._serialize. + - model.make_connection. + +Default Namespace +================= + +In the previous version, ``google.appengine.api.namespacemanager`` was used +to determine the default namespace when not passed to constructors that +require it, like ``Key``. In this version, the client class can be instantiated +with a namespace, which will be used as the default whenever it's not included +in the constructor or method arguments that expect a namespace:: + + from google.cloud import ndb + + client = ndb.Client(namespace="my namespace") + + with client.context() as context: + key = ndb.Key("SomeKind", "SomeId") + +In this example, the key will be created under the namespace `my namespace`, +because that's the namespace passed in when setting up the client. + +Django Middleware +================= + +The Django middleware that was part of the GAE version of `ndb` has been +discontinued and is no longer available in current `ndb`. The middleware +basically took care of setting the context, which can be accomplished on +modern Django with a simple class-based middleware, similar to this:: + + from google.cloud import ndb + + class NDBMiddleware(object): + def __init__(self, get_response): + self.get_response = get_response + self.client = ndb.Client() + + def __call__(self, request): + context = self.client.context() + request.ndb_context = context + with context: + response = self.get_response(request) + return response + +The ``__init__`` method is called only once, during server start, so it's a +good place to create and store an `ndb` client.
As mentioned above, the +recommended practice is to have one context per request, so the ``__call__`` +method, which is called once per request, is an ideal place to create it. +After we have the context, we add it to the request, right before the response +is processed. The context will then be available in view and template code. +Finally, we use the ``with`` statement to generate the response within our +context. + +Another way to get an `ndb` context into a request would be to use a `context +processor`, but context processors are functions called on every request, which +means we would either need to initialize the client and context on each request +or find another way to create the client up front. + +Note that the above code, like other `ndb` code, assumes the presence of the +`GOOGLE_APPLICATION_CREDENTIALS` environment variable when the client is +created. See the Django documentation for details on setting up the +environment. diff --git a/packages/google-cloud-ndb/docs/model.rst b/packages/google-cloud-ndb/docs/model.rst new file mode 100644 index 000000000000..8d6a28a40e82 --- /dev/null +++ b/packages/google-cloud-ndb/docs/model.rst @@ -0,0 +1,9 @@ +################## +Model and Property +################## + +.. automodule:: google.cloud.ndb.model + :members: + :exclude-members: Key, Rollback + :undoc-members: + :show-inheritance: diff --git a/packages/google-cloud-ndb/docs/msgprop.rst b/packages/google-cloud-ndb/docs/msgprop.rst new file mode 100644 index 000000000000..06e4e843b003 --- /dev/null +++ b/packages/google-cloud-ndb/docs/msgprop.rst @@ -0,0 +1,9 @@ +########################### +ProtoRPC Message Properties +########################### + +.. automodule:: google.cloud.ndb.msgprop + :members: + :inherited-members: + :undoc-members: + :show-inheritance: diff --git a/packages/google-cloud-ndb/docs/polymodel.rst b/packages/google-cloud-ndb/docs/polymodel.rst new file mode 100644 index 000000000000..2eee855e5d7c --- /dev/null +++ b/packages/google-cloud-ndb/docs/polymodel.rst @@ -0,0 +1,8 @@ +############################## +Polymorphic Models and Queries +############################## + +.. automodule:: google.cloud.ndb.polymodel + :members: + :inherited-members: + :show-inheritance: diff --git a/packages/google-cloud-ndb/docs/query.rst b/packages/google-cloud-ndb/docs/query.rst new file mode 100644 index 000000000000..860d190a061e --- /dev/null +++ b/packages/google-cloud-ndb/docs/query.rst @@ -0,0 +1,9 @@ +##### +Query +##### + +..
automodule:: google.cloud.ndb.query + :members: + :inherited-members: + :undoc-members: + :show-inheritance: diff --git a/packages/google-cloud-ndb/docs/spelling_wordlist.txt b/packages/google-cloud-ndb/docs/spelling_wordlist.txt new file mode 100644 index 000000000000..8c3b400d0c04 --- /dev/null +++ b/packages/google-cloud-ndb/docs/spelling_wordlist.txt @@ -0,0 +1,101 @@ +Admin +api +App +app +Appengine +appengine +Args +args +async +auth +backend +Blobstore +blobstore +bool +boolean +builtin +composable +Datastore +datastore +deserialize +deserialized +Dict +Django +Expando +expando +fallback +Firestore +func +google +gRPC +gql +gVisor +indices +instantiation +iter +iterable +lookups +marshalling +memcache +Metaclass +metaclass +Metaclasses +metaclasses +Metadata +metadata +meth +middleware +MultiFuture +multitenancy +Namespace +Namespaces +namespace +namespaces +NDB +ndb +NoLongerImplementedError +OAuth +offline +param +polymorphism +Pre +pre +prefetch +protobuf +proxied +QueryOptions +reimplemented +Redis +RequestHandler +runtime +schemas +stackable +StringProperty +subattribute +subclassed +subclasses +subclassing +subentities +subentity +subproperties +subproperty +superset +Tasklet +tasklet +Tasklets +tasklets +timestamp +toplevel +Transactionally +unary +unicode +unindexed +unpickled +unpickling +urlsafe +username +UTF +utils +webapp +websafe +validator diff --git a/packages/google-cloud-ndb/docs/stats.rst b/packages/google-cloud-ndb/docs/stats.rst new file mode 100644 index 000000000000..6f76e3326980 --- /dev/null +++ b/packages/google-cloud-ndb/docs/stats.rst @@ -0,0 +1,8 @@ +#################### +Datastore Statistics +#################### + +.. automodule:: google.cloud.ndb.stats + :members: + :inherited-members: + :show-inheritance: diff --git a/packages/google-cloud-ndb/docs/tasklets.rst b/packages/google-cloud-ndb/docs/tasklets.rst new file mode 100644 index 000000000000..5b8733669656 --- /dev/null +++ b/packages/google-cloud-ndb/docs/tasklets.rst @@ -0,0 +1,9 @@ +######## +Tasklets +######## + +.. automodule:: google.cloud.ndb.tasklets + :members: + :exclude-members: + :undoc-members: + :show-inheritance: diff --git a/packages/google-cloud-ndb/google/cloud/ndb/__init__.py b/packages/google-cloud-ndb/google/cloud/ndb/__init__.py new file mode 100644 index 000000000000..3375db72e07b --- /dev/null +++ b/packages/google-cloud-ndb/google/cloud/ndb/__init__.py @@ -0,0 +1,239 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""``ndb`` is a library for Google Cloud Firestore in Datastore Mode and Google Cloud Datastore. + +It was originally included in the Google App Engine runtime as a "new" +version of the ``db`` API (hence ``ndb``). + +.. autodata:: __version__ +.. 
autodata:: __all__ +""" + +from google.cloud.ndb import version + +__version__ = version.__version__ + +from google.cloud.ndb.client import Client +from google.cloud.ndb.context import AutoBatcher +from google.cloud.ndb.context import Context +from google.cloud.ndb.context import ContextOptions +from google.cloud.ndb.context import get_context +from google.cloud.ndb.context import get_toplevel_context +from google.cloud.ndb.context import TransactionOptions +from google.cloud.ndb._datastore_api import EVENTUAL +from google.cloud.ndb._datastore_api import EVENTUAL_CONSISTENCY +from google.cloud.ndb._datastore_api import STRONG +from google.cloud.ndb._datastore_query import Cursor +from google.cloud.ndb._datastore_query import QueryIterator +from google.cloud.ndb.global_cache import GlobalCache +from google.cloud.ndb.global_cache import MemcacheCache +from google.cloud.ndb.global_cache import RedisCache +from google.cloud.ndb.key import Key +from google.cloud.ndb.model import BlobKey +from google.cloud.ndb.model import BlobKeyProperty +from google.cloud.ndb.model import BlobProperty +from google.cloud.ndb.model import BooleanProperty +from google.cloud.ndb.model import ComputedProperty +from google.cloud.ndb.model import ComputedPropertyError +from google.cloud.ndb.model import DateProperty +from google.cloud.ndb.model import DateTimeProperty +from google.cloud.ndb.model import delete_multi +from google.cloud.ndb.model import delete_multi_async +from google.cloud.ndb.model import Expando +from google.cloud.ndb.model import FloatProperty +from google.cloud.ndb.model import GenericProperty +from google.cloud.ndb.model import GeoPt +from google.cloud.ndb.model import GeoPtProperty +from google.cloud.ndb.model import get_indexes +from google.cloud.ndb.model import get_indexes_async +from google.cloud.ndb.model import get_multi +from google.cloud.ndb.model import get_multi_async +from google.cloud.ndb.model import Index +from google.cloud.ndb.model import IndexProperty +from google.cloud.ndb.model import IndexState +from google.cloud.ndb.model import IntegerProperty +from google.cloud.ndb.model import InvalidPropertyError +from google.cloud.ndb.model import BadProjectionError +from google.cloud.ndb.model import JsonProperty +from google.cloud.ndb.model import KeyProperty +from google.cloud.ndb.model import KindError +from google.cloud.ndb.model import LocalStructuredProperty +from google.cloud.ndb.model import make_connection +from google.cloud.ndb.model import MetaModel +from google.cloud.ndb.model import Model +from google.cloud.ndb.model import ModelAdapter +from google.cloud.ndb.model import ModelAttribute +from google.cloud.ndb.model import ModelKey +from google.cloud.ndb.model import PickleProperty +from google.cloud.ndb.model import Property +from google.cloud.ndb.model import put_multi +from google.cloud.ndb.model import put_multi_async +from google.cloud.ndb.model import ReadonlyPropertyError +from google.cloud.ndb.model import Rollback +from google.cloud.ndb.model import StringProperty +from google.cloud.ndb.model import StructuredProperty +from google.cloud.ndb.model import TextProperty +from google.cloud.ndb.model import TimeProperty +from google.cloud.ndb.model import UnprojectedPropertyError +from google.cloud.ndb.model import User +from google.cloud.ndb.model import UserNotFoundError +from google.cloud.ndb.model import UserProperty +from google.cloud.ndb.polymodel import PolyModel +from google.cloud.ndb.query import ConjunctionNode +from google.cloud.ndb.query import AND +from 
google.cloud.ndb.query import DisjunctionNode +from google.cloud.ndb.query import OR +from google.cloud.ndb.query import FalseNode +from google.cloud.ndb.query import FilterNode +from google.cloud.ndb.query import gql +from google.cloud.ndb.query import Node +from google.cloud.ndb.query import Parameter +from google.cloud.ndb.query import ParameterizedFunction +from google.cloud.ndb.query import ParameterizedThing +from google.cloud.ndb.query import ParameterNode +from google.cloud.ndb.query import PostFilterNode +from google.cloud.ndb.query import Query +from google.cloud.ndb.query import QueryOptions +from google.cloud.ndb.query import RepeatedStructuredPropertyPredicate +from google.cloud.ndb.tasklets import add_flow_exception +from google.cloud.ndb.tasklets import Future +from google.cloud.ndb.tasklets import make_context +from google.cloud.ndb.tasklets import make_default_context +from google.cloud.ndb.tasklets import QueueFuture +from google.cloud.ndb.tasklets import ReducingFuture +from google.cloud.ndb.tasklets import Return +from google.cloud.ndb.tasklets import SerialQueueFuture +from google.cloud.ndb.tasklets import set_context +from google.cloud.ndb.tasklets import sleep +from google.cloud.ndb.tasklets import synctasklet +from google.cloud.ndb.tasklets import tasklet +from google.cloud.ndb.tasklets import toplevel +from google.cloud.ndb.tasklets import wait_all +from google.cloud.ndb.tasklets import wait_any +from google.cloud.ndb._transaction import in_transaction +from google.cloud.ndb._transaction import transaction +from google.cloud.ndb._transaction import transaction_async +from google.cloud.ndb._transaction import transactional +from google.cloud.ndb._transaction import transactional_async +from google.cloud.ndb._transaction import transactional_tasklet +from google.cloud.ndb._transaction import non_transactional + +__all__ = [ + "__version__", + "AutoBatcher", + "Client", + "Context", + "ContextOptions", + "EVENTUAL", + "EVENTUAL_CONSISTENCY", + "STRONG", + "TransactionOptions", + "Key", + "BlobKey", + "BlobKeyProperty", + "BlobProperty", + "BooleanProperty", + "ComputedProperty", + "ComputedPropertyError", + "DateProperty", + "DateTimeProperty", + "delete_multi", + "delete_multi_async", + "Expando", + "FloatProperty", + "GenericProperty", + "GeoPt", + "GeoPtProperty", + "get_indexes", + "get_indexes_async", + "get_multi", + "get_multi_async", + "GlobalCache", + "in_transaction", + "Index", + "IndexProperty", + "IndexState", + "IntegerProperty", + "InvalidPropertyError", + "BadProjectionError", + "JsonProperty", + "KeyProperty", + "KindError", + "LocalStructuredProperty", + "make_connection", + "MemcacheCache", + "MetaModel", + "Model", + "ModelAdapter", + "ModelAttribute", + "ModelKey", + "non_transactional", + "PickleProperty", + "PolyModel", + "Property", + "put_multi", + "put_multi_async", + "ReadonlyPropertyError", + "RedisCache", + "Rollback", + "StringProperty", + "StructuredProperty", + "TextProperty", + "TimeProperty", + "transaction", + "transaction_async", + "transactional", + "transactional_async", + "transactional_tasklet", + "UnprojectedPropertyError", + "User", + "UserNotFoundError", + "UserProperty", + "ConjunctionNode", + "AND", + "Cursor", + "DisjunctionNode", + "OR", + "FalseNode", + "FilterNode", + "gql", + "Node", + "Parameter", + "ParameterizedFunction", + "ParameterizedThing", + "ParameterNode", + "PostFilterNode", + "Query", + "QueryIterator", + "QueryOptions", + "RepeatedStructuredPropertyPredicate", + "add_flow_exception", + "Future", + 
"get_context", + "get_toplevel_context", + "make_context", + "make_default_context", + "QueueFuture", + "ReducingFuture", + "Return", + "SerialQueueFuture", + "set_context", + "sleep", + "synctasklet", + "tasklet", + "toplevel", + "wait_all", + "wait_any", +] diff --git a/packages/google-cloud-ndb/google/cloud/ndb/_batch.py b/packages/google-cloud-ndb/google/cloud/ndb/_batch.py new file mode 100644 index 000000000000..454f9b701ed3 --- /dev/null +++ b/packages/google-cloud-ndb/google/cloud/ndb/_batch.py @@ -0,0 +1,66 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Support for batching operations.""" + + +def get_batch(batch_cls, options=None): + """Gets a data structure for storing batched calls to Datastore Lookup. + + The batch data structure is stored in the current context. If there is + not already a batch started, a new structure is created and an idle + callback is added to the current event loop which will eventually perform + the batch look up. + + Args: + batch_cls (type): Class representing the kind of operation being + batched. + options (_options.ReadOptions): The options for the request. Calls with + different options will be placed in different batches. + + Returns: + batch_cls: An instance of the batch class. + """ + # prevent circular import in Python 2.7 + from google.cloud.ndb import context as context_module + + context = context_module.get_context() + batches = context.batches.get(batch_cls) + if batches is None: + context.batches[batch_cls] = batches = {} + + if options is not None: + options_key = tuple( + sorted( + ((key, value) for key, value in options.items() if value is not None) + ) + ) + else: + options_key = () + + batch = batches.get(options_key) + if batch is not None and not batch.full(): + return batch + + def idler(batch): + def idle(): + if batches.get(options_key) is batch: + del batches[options_key] + batch.idle_callback() + + return idle + + batches[options_key] = batch = batch_cls(options) + context.eventloop.add_idle(idler(batch)) + return batch diff --git a/packages/google-cloud-ndb/google/cloud/ndb/_cache.py b/packages/google-cloud-ndb/google/cloud/ndb/_cache.py new file mode 100644 index 000000000000..40be51190bef --- /dev/null +++ b/packages/google-cloud-ndb/google/cloud/ndb/_cache.py @@ -0,0 +1,741 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import functools +import itertools +import logging +import uuid +import warnings + +from google.api_core import retry as core_retry + +from google.cloud.ndb import _batch +from google.cloud.ndb import context as context_module +from google.cloud.ndb import tasklets +from google.cloud.ndb import utils + +_LOCKED_FOR_READ = b"0-" +_LOCKED_FOR_WRITE = b"00" +_LOCK_TIME = 64 +_PREFIX = b"NDB30" + +warnings.filterwarnings("always", module=__name__) +log = logging.getLogger(__name__) + + +class ContextCache(dict): + """A per-context in-memory entity cache. + + This cache verifies the fetched entity has the correct key before + returning a result, in order to handle cases where the entity's key was + modified but the cache's key was not updated. + """ + + def get_and_validate(self, key): + """Verify that the entity's key has not changed since it was added + to the cache. If it has changed, consider this a cache miss. + See issue 13. http://goo.gl/jxjOP""" + entity = self[key] # May be None, meaning "doesn't exist". + if entity is None or entity._key == key: + return entity + else: + del self[key] + raise KeyError(key) + + def __repr__(self): + return "ContextCache()" + + +def _future_result(result): + """Returns a completed Future with the given result. + + For conforming to the asynchronous interface even if we've gotten the + result synchronously. + """ + future = tasklets.Future() + future.set_result(result) + return future + + +def _future_exception(error): + """Returns a completed Future with the given exception. + + For conforming to the asynchronous interface even if we've gotten the + result synchronously. + """ + future = tasklets.Future() + future.set_exception(error) + return future + + +def _global_cache(): + """Returns the global cache for the current context.""" + return context_module.get_context().global_cache + + +class _GlobalCacheBatch(object): + """Abstract base for classes used to batch operations for the global cache.""" + + def full(self): + """Indicates whether more work can be added to this batch. + + Returns: + boolean: `False`, always. + """ + return False + + def idle_callback(self): + """Call the cache operation. + + Also, schedule a callback for the completed operation. + """ + try: + cache_call = self.make_call() + if not isinstance(cache_call, tasklets.Future): + cache_call = _future_result(cache_call) + except Exception as error: + cache_call = _future_exception(error) + + cache_call.add_done_callback(self.done_callback) + + def done_callback(self, cache_call): + """Process results of call to global cache. + + If there is an exception for the cache call, distribute that to waiting + futures, otherwise set the result for all waiting futures to ``None``. + """ + exception = cache_call.exception() + if exception: + for future in self.futures: + future.set_exception(exception) + + else: + for future in self.futures: + future.set_result(None) + + def make_call(self): + """Make the actual call to the global cache. To be overridden.""" + raise NotImplementedError + + def future_info(self, key): + """Generate info string for Future. To be overridden.""" + raise NotImplementedError + + +def _handle_transient_errors(read=False): + """Decorator for global_XXX functions for handling transient errors. + + Will log as warning or reraise transient errors according to `strict_read` and + `strict_write` attributes of the global cache and whether the operation is a read or + a write. 
+ + If in strict mode, will retry the wrapped function up to 5 times before reraising + the transient error. + """ + + def wrap(wrapped): + def retry(wrapped, transient_errors): + @functools.wraps(wrapped) + @tasklets.tasklet + def retry_wrapper(key, *args, **kwargs): + sleep_generator = core_retry.exponential_sleep_generator(0.1, 1) + attempts = 5 + for sleep_time in sleep_generator: # pragma: NO BRANCH + # pragma is required because loop never exits normally, it only gets + # raised out of. + attempts -= 1 + try: + result = yield wrapped(key, *args, **kwargs) + raise tasklets.Return(result) + except transient_errors: + if not attempts: + raise + + yield tasklets.sleep(sleep_time) + + return retry_wrapper + + @functools.wraps(wrapped) + @tasklets.tasklet + def wrapper(key, *args, **kwargs): + cache = _global_cache() + + is_read = read + if not is_read: + is_read = kwargs.get("read", False) + + strict = cache.strict_read if is_read else cache.strict_write + if strict: + function = retry(wrapped, cache.transient_errors) + else: + function = wrapped + + try: + result = yield function(key, *args, **kwargs) + raise tasklets.Return(result) + + except cache.transient_errors as error: + if strict: + raise + + if not getattr(error, "_ndb_warning_logged", False): + # Same exception will be sent to every future in the batch. Only + # need to log one warning, though. + warnings.warn( + "Error connecting to global cache: {}".format(error), + RuntimeWarning, + ) + error._ndb_warning_logged = True + + raise tasklets.Return(None) + + return wrapper + + return wrap + + +def _global_get(key): + """Get entity from global cache. + + Args: + key (bytes): The key to get. + + Returns: + tasklets.Future: Eventual result will be the entity (``bytes``) or + ``None``. + """ + batch = _batch.get_batch(_GlobalCacheGetBatch) + return batch.add(key) + + +global_get = _handle_transient_errors(read=True)(_global_get) + + +class _GlobalCacheGetBatch(_GlobalCacheBatch): + """Batch for global cache get requests. + + Attributes: + todo (Dict[bytes, List[Future]]): Mapping of keys to futures that are + waiting on them. + + Arguments: + ignore_options (Any): Ignored. + """ + + def __init__(self, ignore_options): + self.todo = {} + self.keys = [] + + def add(self, key): + """Add a key to get from the cache. + + Arguments: + key (bytes): The key to get from the cache. + + Returns: + tasklets.Future: Eventual result will be the entity retrieved from + the cache (``bytes``) or ``None``. + """ + future = tasklets.Future(info=self.future_info(key)) + futures = self.todo.get(key) + if futures is None: + self.todo[key] = futures = [] + self.keys.append(key) + futures.append(future) + return future + + def done_callback(self, cache_call): + """Process results of call to global cache. + + If there is an exception for the cache call, distribute that to waiting + futures, otherwise distribute cache hits or misses to their respective + waiting futures. 
+ """ + exception = cache_call.exception() + if exception: + for future in itertools.chain(*self.todo.values()): + future.set_exception(exception) + + return + + results = cache_call.result() + for key, result in zip(self.keys, results): + futures = self.todo[key] + for future in futures: + future.set_result(result) + + def make_call(self): + """Call :method:`GlobalCache.get`.""" + return _global_cache().get(self.keys) + + def future_info(self, key): + """Generate info string for Future.""" + return "GlobalCache.get({})".format(key) + + +@_handle_transient_errors() +def global_set(key, value, expires=None, read=False): + """Store entity in the global cache. + + Args: + key (bytes): The key to save. + value (bytes): The entity to save. + expires (Optional[float]): Number of seconds until value expires. + read (bool): Indicates if being set in a read (lookup) context. + + Returns: + tasklets.Future: Eventual result will be ``None``. + """ + options = {} + if expires: + options = {"expires": expires} + + batch = _batch.get_batch(_GlobalCacheSetBatch, options) + return batch.add(key, value) + + +class _GlobalCacheSetBatch(_GlobalCacheBatch): + """Batch for global cache set requests.""" + + def __init__(self, options): + self.expires = options.get("expires") + self.todo = {} + self.futures = {} + + def done_callback(self, cache_call): + """Process results of call to global cache. + + If there is an exception for the cache call, distribute that to waiting + futures, otherwise examine the result of the cache call. If the result is + :data:`None`, simply set the result to :data:`None` for all waiting futures. + Otherwise, if the result is a `dict`, use that to propagate results for + individual keys to waiting futures. + """ + exception = cache_call.exception() + if exception: + for future in self.futures.values(): + future.set_exception(exception) + return + + result = cache_call.result() + if result: + for key, future in self.futures.items(): + key_result = result.get(key, None) + if isinstance(key_result, Exception): + future.set_exception(key_result) + else: + future.set_result(key_result) + else: + for future in self.futures.values(): + future.set_result(None) + + def add(self, key, value): + """Add a key, value pair to store in the cache. + + Arguments: + key (bytes): The key to store in the cache. + value (bytes): The value to store in the cache. + + Returns: + tasklets.Future: Eventual result will be ``None``. + """ + future = self.futures.get(key) + if future: + if self.todo[key] != value: + # I don't think this is likely to happen. I'd like to know about it if + # it does because that might indicate a bad software design. + future = tasklets.Future() + future.set_exception( + RuntimeError( + "Key has already been set in this batch: {}".format(key) + ) + ) + + return future + + future = tasklets.Future(info=self.future_info(key, value)) + self.todo[key] = value + self.futures[key] = future + return future + + def make_call(self): + """Call :method:`GlobalCache.set`.""" + return _global_cache().set(self.todo, expires=self.expires) + + def future_info(self, key, value): + """Generate info string for Future.""" + return "GlobalCache.set({}, {})".format(key, value) + + +@tasklets.tasklet +def global_set_if_not_exists(key, value, expires=None): + """Store entity in the global cache if key is not already present. + + Args: + key (bytes): The key to save. + value (bytes): The entity to save. + expires (Optional[float]): Number of seconds until value expires. 
+ + Returns: + tasklets.Future: Eventual result will be a ``bool`` value which will be + :data:`True` if a new value was set for the key, or :data:`False` if a value + was already set for the key or if a transient error occurred while + attempting to set the key. + """ + options = {} + if expires: + options = {"expires": expires} + + cache = _global_cache() + batch = _batch.get_batch(_GlobalCacheSetIfNotExistsBatch, options) + try: + success = yield batch.add(key, value) + except cache.transient_errors: + success = False + + raise tasklets.Return(success) + + +class _GlobalCacheSetIfNotExistsBatch(_GlobalCacheSetBatch): + """Batch for global cache set_if_not_exists requests.""" + + def add(self, key, value): + """Add a key, value pair to store in the cache. + + Arguments: + key (bytes): The key to store in the cache. + value (bytes): The value to store in the cache. + + Returns: + tasklets.Future: Eventual result will be a ``bool`` value which will be + :data:`True` if a new value was set for the key, or :data:`False` if a + value was already set for the key. + """ + if key in self.todo: + future = tasklets.Future() + future.set_result(False) + return future + + future = tasklets.Future(info=self.future_info(key, value)) + self.todo[key] = value + self.futures[key] = future + return future + + def make_call(self): + """Call :method:`GlobalCache.set`.""" + return _global_cache().set_if_not_exists(self.todo, expires=self.expires) + + def future_info(self, key, value): + """Generate info string for Future.""" + return "GlobalCache.set_if_not_exists({}, {})".format(key, value) + + +def _global_delete(key): + """Delete an entity from the global cache. + + Args: + key (bytes): The key to delete. + + Returns: + tasklets.Future: Eventual result will be ``None``. + """ + batch = _batch.get_batch(_GlobalCacheDeleteBatch) + return batch.add(key) + + +global_delete = _handle_transient_errors()(_global_delete) + + +class _GlobalCacheDeleteBatch(_GlobalCacheBatch): + """Batch for global cache delete requests.""" + + def __init__(self, ignore_options): + self.keys = [] + self.futures = [] + + def add(self, key): + """Add a key to delete from the cache. + + Arguments: + key (bytes): The key to delete. + + Returns: + tasklets.Future: Eventual result will be ``None``. + """ + future = tasklets.Future(info=self.future_info(key)) + self.keys.append(key) + self.futures.append(future) + return future + + def make_call(self): + """Call :method:`GlobalCache.delete`.""" + return _global_cache().delete(self.keys) + + def future_info(self, key): + """Generate info string for Future.""" + return "GlobalCache.delete({})".format(key) + + +def _global_watch(key, value): + """Start optimistic transaction with global cache. + + A future call to :func:`global_compare_and_swap` will only set the value + if the value hasn't changed in the cache since the call to this function. + + Args: + key (bytes): The key to watch. + + Returns: + tasklets.Future: Eventual result will be ``None``. 
+ """ + batch = _batch.get_batch(_GlobalCacheWatchBatch, {}) + return batch.add(key, value) + + +global_watch = _handle_transient_errors(read=True)(_global_watch) + + +class _GlobalCacheWatchBatch(_GlobalCacheSetBatch): + """Batch for global cache watch requests.""" + + def make_call(self): + """Call :method:`GlobalCache.watch`.""" + return _global_cache().watch(self.todo) + + def future_info(self, key, value): + """Generate info string for Future.""" + return "GlobalCache.watch({}, {})".format(key, value) + + +@_handle_transient_errors() +def global_unwatch(key): + """End optimistic transaction with global cache. + + Indicates that value for the key wasn't found in the database, so there will not be + a future call to :func:`global_compare_and_swap`, and we no longer need to watch + this key. + + Args: + key (bytes): The key to unwatch. + + Returns: + tasklets.Future: Eventual result will be ``None``. + """ + batch = _batch.get_batch(_GlobalCacheUnwatchBatch, {}) + return batch.add(key) + + +class _GlobalCacheUnwatchBatch(_GlobalCacheDeleteBatch): + """Batch for global cache unwatch requests.""" + + def make_call(self): + """Call :method:`GlobalCache.unwatch`.""" + return _global_cache().unwatch(self.keys) + + def future_info(self, key): + """Generate info string for Future.""" + return "GlobalCache.unwatch({})".format(key) + + +def _global_compare_and_swap(key, value, expires=None): + """Like :func:`global_set` but using an optimistic transaction. + + Value will only be set for the given key if the value in the cache hasn't + changed since a preceding call to :func:`global_watch`. + + Args: + key (bytes): The key to save. + value (bytes): The entity to save. + expires (Optional[float]): Number of seconds until value expires. + + Returns: + tasklets.Future: Eventual result will be ``None``. + """ + options = {} + if expires: + options["expires"] = expires + + batch = _batch.get_batch(_GlobalCacheCompareAndSwapBatch, options) + return batch.add(key, value) + + +global_compare_and_swap = _handle_transient_errors(read=True)(_global_compare_and_swap) + + +class _GlobalCacheCompareAndSwapBatch(_GlobalCacheSetBatch): + """Batch for global cache compare and swap requests.""" + + def make_call(self): + """Call :method:`GlobalCache.compare_and_swap`.""" + return _global_cache().compare_and_swap(self.todo, expires=self.expires) + + def future_info(self, key, value): + """Generate info string for Future.""" + return "GlobalCache.compare_and_swap({}, {})".format(key, value) + + +@tasklets.tasklet +def global_lock_for_read(key, prev_value): + """Lock a key for a read (lookup) operation by setting a special value. + + Lock may be preempted by a parallel write (put) operation. + + Args: + key (bytes): The key to lock. + prev_value (bytes): The cache value previously read from the global cache. + Should be either :data:`None` or an empty bytes object if a key was written + recently. + + Returns: + tasklets.Future: Eventual result will be lock value (``bytes``) written to + Datastore for the given key, or :data:`None` if the lock was not acquired. 
+ """ + lock = _LOCKED_FOR_READ + str(uuid.uuid4()).encode("ascii") + if prev_value is not None: + yield global_watch(key, prev_value) + lock_acquired = yield global_compare_and_swap(key, lock, expires=_LOCK_TIME) + else: + lock_acquired = yield global_set_if_not_exists(key, lock, expires=_LOCK_TIME) + + if lock_acquired: + raise tasklets.Return(lock) + + +@_handle_transient_errors() +@tasklets.tasklet +def global_lock_for_write(key): + """Lock a key for a write (put) operation, by setting or updating a special value. + + There can be multiple write locks for a given key. Key will only be released when + all write locks have been released. + + Args: + key (bytes): The key to lock. + + Returns: + tasklets.Future: Eventual result will be a lock value to be used later with + :func:`global_unlock`. + """ + lock = "." + str(uuid.uuid4()) + lock = lock.encode("ascii") + utils.logging_debug(log, "lock for write: {}", lock) + + def new_value(old_value): + if old_value and old_value.startswith(_LOCKED_FOR_WRITE): + return old_value + lock + + return _LOCKED_FOR_WRITE + lock + + yield _update_key(key, new_value) + + raise tasklets.Return(lock) + + +@tasklets.tasklet +def global_unlock_for_write(key, lock): + """Remove a lock for key by updating or removing a lock value. + + The lock represented by the ``lock`` argument will be released. + + Args: + key (bytes): The key to lock. + lock (bytes): The return value from the call :func:`global_lock` which acquired + the lock. + + Returns: + tasklets.Future: Eventual result will be :data:`None`. + """ + utils.logging_debug(log, "unlock for write: {}", lock) + + def new_value(old_value): + value = old_value + if value and lock in value: + value = value.replace(lock, b"") + + else: + warnings.warn( + "Attempt to remove a lock that doesn't exist. This is mostly likely " + "caused by a long running operation and the lock timing out.", + RuntimeWarning, + ) + + if value == _LOCKED_FOR_WRITE: + value = b"" + + if value and not value.startswith(_LOCKED_FOR_WRITE): + # If this happens, it means the lock expired and something else got written + # to the cache in the meantime. Whatever value that is, since there was a + # write operation that is concluding now, we should consider it stale and + # write a blank value. + value = b"" + + return value + + cache = _global_cache() + try: + yield _update_key(key, new_value) + except cache.transient_errors: + # Worst case scenario, lock sticks around for longer than we'd like + pass + + +@tasklets.tasklet +def _update_key(key, new_value): + success = False + + while not success: + old_value = yield _global_get(key) + utils.logging_debug(log, "old value: {}", old_value) + + value = new_value(old_value) + utils.logging_debug(log, "new value: {}", value) # pragma: SYNCPOINT update key + + if old_value == value: + utils.logging_debug(log, "nothing to do") + return + + if old_value is not None: + utils.logging_debug(log, "compare and swap") + yield _global_watch(key, old_value) + success = yield _global_compare_and_swap(key, value, expires=_LOCK_TIME) + + else: + utils.logging_debug(log, "set if not exists") + success = yield global_set_if_not_exists(key, value, expires=_LOCK_TIME) + + utils.logging_debug(log, "success: {}", success) + + +def is_locked_value(value): + """Check if the given value is the special reserved value for key lock. + + Returns: + bool: Whether the value is the special reserved value for key lock. 
+ """ + if value: + return value.startswith(_LOCKED_FOR_READ) or value.startswith(_LOCKED_FOR_WRITE) + + return False + + +def global_cache_key(key): + """Convert Datastore key to ``bytes`` to use for global cache key. + + Args: + key (datastore.Key): The Datastore key. + + Returns: + bytes: The cache key. + """ + return _PREFIX + key.to_protobuf()._pb.SerializeToString() diff --git a/packages/google-cloud-ndb/google/cloud/ndb/_datastore_api.py b/packages/google-cloud-ndb/google/cloud/ndb/_datastore_api.py new file mode 100644 index 000000000000..bca130a78271 --- /dev/null +++ b/packages/google-cloud-ndb/google/cloud/ndb/_datastore_api.py @@ -0,0 +1,1160 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Functions that interact with Datastore backend.""" + +import grpc +import itertools +import logging + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.cloud.datastore import helpers +from google.cloud.datastore_v1.types import datastore as datastore_pb2 +from google.cloud.datastore_v1.types import entity as entity_pb2 + +from google.cloud.ndb import context as context_module +from google.cloud.ndb import _batch +from google.cloud.ndb import _cache +from google.cloud.ndb import _eventloop +from google.cloud.ndb import _options +from google.cloud.ndb import _remote +from google.cloud.ndb import _retry +from google.cloud.ndb import tasklets +from google.cloud.ndb import utils + +EVENTUAL = datastore_pb2.ReadOptions.ReadConsistency.EVENTUAL +EVENTUAL_CONSISTENCY = EVENTUAL # Legacy NDB +STRONG = datastore_pb2.ReadOptions.ReadConsistency.STRONG + +_DEFAULT_TIMEOUT = None +_NOT_FOUND = object() + +log = logging.getLogger(__name__) + + +def stub(): + """Get the stub for the `Google Datastore` API. + + Gets the stub from the current context. + + Returns: + :class:`~google.cloud.datastore_v1.proto.datastore_pb2_grpc.DatastoreStub`: + The stub instance. + """ + context = context_module.get_context() + return context.client.stub + + +def make_call(rpc_name, request, retries=None, timeout=None, metadata=()): + """Make a call to the Datastore API. + + Args: + rpc_name (str): Name of the remote procedure to call on Datastore. + request (Any): An appropriate request object for the call, eg, + `entity_pb2.LookupRequest` for calling ``Lookup``. + retries (int): Number of times to potentially retry the call. If + :data:`None` is passed, will use :data:`_retry._DEFAULT_RETRIES`. + If :data:`0` is passed, the call is attempted only once. + timeout (float): Timeout, in seconds, to pass to gRPC call. If + :data:`None` is passed, will use :data:`_DEFAULT_TIMEOUT`. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + tasklets.Future: Future for the eventual response for the API call. 
+ """ + api = stub() + method = getattr(api, rpc_name) + + if retries is None: + retries = _retry._DEFAULT_RETRIES + + if timeout is None: + timeout = _DEFAULT_TIMEOUT + + @tasklets.tasklet + def rpc_call(): + context = context_module.get_toplevel_context() + + call = method.future(request, timeout=timeout, metadata=metadata) + rpc = _remote.RemoteCall(call, rpc_name) + utils.logging_debug(log, rpc) + utils.logging_debug(log, "timeout={}", timeout) + utils.logging_debug(log, request) + + try: + result = yield rpc + except Exception as error: + if isinstance(error, grpc.Call): + error = core_exceptions.from_grpc_error(error) + raise error + finally: + context.rpc_time += rpc.elapsed_time + + raise tasklets.Return(result) + + if retries: + rpc_call = _retry.retry_async(rpc_call, retries=retries) + + return rpc_call() + + +@tasklets.tasklet +def lookup(key, options): + """Look up a Datastore entity. + + Gets an entity from Datastore, asynchronously. Checks the global cache, + first, if appropriate. Uses batching. + + Args: + key (~datastore.Key): The key for the entity to retrieve. + options (_options.ReadOptions): The options for the request. For + example, ``{"read_consistency": EVENTUAL}``. + + Returns: + :class:`~tasklets.Future`: If not an exception, future's result will be + either an entity protocol buffer or _NOT_FOUND. + """ + context = context_module.get_context() + use_datastore = context._use_datastore(key, options) + if use_datastore and options.transaction: + use_global_cache = False + else: + use_global_cache = context._use_global_cache(key, options) + + if not (use_global_cache or use_datastore): + raise TypeError("use_global_cache and use_datastore can't both be False") + + entity_pb = _NOT_FOUND + key_locked = False + + if use_global_cache: + cache_key = _cache.global_cache_key(key) + result = yield _cache.global_get(cache_key) + key_locked = _cache.is_locked_value(result) + if not key_locked: + if result: + entity_pb = entity_pb2.Entity() + entity_pb._pb.MergeFromString(result) + + elif use_datastore: + lock = yield _cache.global_lock_for_read(cache_key, result) + if lock: + yield _cache.global_watch(cache_key, lock) + + else: + # Another thread locked or wrote to this key after the call to + # _cache.global_get above. Behave as though the key was locked by + # another thread and don't attempt to write our value below + key_locked = True + + if entity_pb is _NOT_FOUND and use_datastore: + batch = _batch.get_batch(_LookupBatch, options) + entity_pb = yield batch.add(key) + + # Do not cache misses + if use_global_cache and not key_locked: + if entity_pb is not _NOT_FOUND: + expires = context._global_cache_timeout(key, options) + serialized = entity_pb._pb.SerializeToString() + yield _cache.global_compare_and_swap( + cache_key, serialized, expires=expires + ) + else: + yield _cache.global_unwatch(cache_key) + + raise tasklets.Return(entity_pb) + + +class _LookupBatch(object): + """Batch for Lookup requests. + + Attributes: + options (Dict[str, Any]): See Args. + todo (Dict[bytes, List[tasklets.Future]]: Mapping of serialized key + protocol buffers to dependent futures. + + Args: + options (_options.ReadOptions): The options for the request. Calls with + different options will be placed in different batches. + """ + + def __init__(self, options): + self.options = options + self.todo = {} + + def full(self): + """Indicates whether more work can be added to this batch. + + Returns: + boolean: `True` if number of keys to be looked up has reached 1000, + else `False`. 
+ """ + return len(self.todo) >= 1000 + + def add(self, key): + """Add a key to the batch to look up. + + Args: + key (datastore.Key): The key to look up. + + Returns: + tasklets.Future: A future for the eventual result. + """ + todo_key = key.to_protobuf()._pb.SerializeToString() + future = tasklets.Future(info="Lookup({})".format(key)) + self.todo.setdefault(todo_key, []).append(future) + return future + + def idle_callback(self): + """Perform a Datastore Lookup on all batched Lookup requests.""" + keys = [] + for todo_key in self.todo.keys(): + key_pb = entity_pb2.Key() + key_pb._pb.ParseFromString(todo_key) + keys.append(key_pb) + + read_options = get_read_options(self.options) + rpc = _datastore_lookup( + keys, + read_options, + retries=self.options.retries, + timeout=self.options.timeout, + ) + rpc.add_done_callback(self.lookup_callback) + + def lookup_callback(self, rpc): + """Process the results of a call to Datastore Lookup. + + Each key in the batch will be in one of `found`, `missing`, or + `deferred`. `found` keys have their futures' results set with the + protocol buffers for their entities. `missing` keys have their futures' + results with `_NOT_FOUND`, a sentinel value. `deferrred` keys are + loaded into a new batch so they can be tried again. + + Args: + rpc (tasklets.Future): If not an exception, the result will be + an instance of + :class:`google.cloud.datastore_v1.datastore_pb.LookupResponse` + """ + # If RPC has resulted in an exception, propagate that exception to all + # waiting futures. + exception = rpc.exception() + if exception is not None: + for future in itertools.chain(*self.todo.values()): + future.set_exception(exception) + return + + # Process results, which are divided into found, missing, and deferred + results = rpc.result() + utils.logging_debug(log, results) + + # For all deferred keys, batch them up again with their original + # futures + if results.deferred: + next_batch = _batch.get_batch(type(self), self.options) + for key in results.deferred: + todo_key = key._pb.SerializeToString() + next_batch.todo.setdefault(todo_key, []).extend(self.todo[todo_key]) + + # For all missing keys, set result to _NOT_FOUND and let callers decide + # how to handle + for result in results.missing: + todo_key = result.entity.key._pb.SerializeToString() + for future in self.todo[todo_key]: + future.set_result(_NOT_FOUND) + + # For all found entities, set the result on their corresponding futures + for result in results.found: + entity = result.entity + todo_key = entity.key._pb.SerializeToString() + for future in self.todo[todo_key]: + future.set_result(entity) + + +def _datastore_lookup(keys, read_options, retries=None, timeout=None, metadata=()): + """Issue a Lookup call to Datastore using gRPC. + + Args: + keys (Iterable[entity_pb2.Key]): The entity keys to + look up. + read_options (Union[datastore_pb2.ReadOptions, NoneType]): Options for + the request. + retries (int): Number of times to potentially retry the call. If + :data:`None` is passed, will use :data:`_retry._DEFAULT_RETRIES`. + If :data:`0` is passed, the call is attempted only once. + timeout (float): Timeout, in seconds, to pass to gRPC call. If + :data:`None` is passed, will use :data:`_DEFAULT_TIMEOUT`. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + tasklets.Future: Future object for eventual result of lookup. 
+ """ + client = context_module.get_context().client + request = datastore_pb2.LookupRequest( + project_id=client.project, + database_id=client.database, + keys=[key for key in keys], + read_options=read_options, + ) + metadata = _add_routing_info(metadata, request) + + return make_call( + "lookup", request, retries=retries, timeout=timeout, metadata=metadata + ) + + +def get_read_options(options, default_read_consistency=None): + """Get the read options for a request. + + Args: + options (_options.ReadOptions): The options for the request. May + contain options unrelated to creating a + :class:`datastore_pb2.ReadOptions` instance, which will be ignored. + default_read_consistency: Use this value for ``read_consistency`` if + neither ``transaction`` nor ``read_consistency`` are otherwise + specified. + + Returns: + datastore_pb2.ReadOptions: The options instance for passing to the + Datastore gRPC API. + + Raises: + ValueError: When ``read_consistency`` is set to ``EVENTUAL`` and there + is a transaction. + """ + transaction = options.transaction + read_consistency = options.read_consistency + + if transaction is None: + if read_consistency is None: + read_consistency = default_read_consistency + + elif read_consistency is EVENTUAL: + raise ValueError("read_consistency must not be EVENTUAL when in transaction") + + return datastore_pb2.ReadOptions( + read_consistency=read_consistency, transaction=transaction + ) + + +@tasklets.tasklet +def put(entity, options): + """Store an entity in datastore. + + The entity can be a new entity to be saved for the first time or an + existing entity that has been updated. + + Args: + entity_pb (datastore.Entity): The entity to be stored. + options (_options.Options): Options for this request. + + Returns: + tasklets.Future: Result will be completed datastore key + (datastore.Key) for the entity. + """ + context = context_module.get_context() + use_global_cache = context._use_global_cache(entity.key, options) + use_datastore = context._use_datastore(entity.key, options) + if not (use_global_cache or use_datastore): + raise TypeError("use_global_cache and use_datastore can't both be False") + + if not use_datastore and entity.key.is_partial: + raise TypeError("Can't store partial keys when use_datastore is False") + + lock = None + entity_pb = helpers.entity_to_protobuf(entity) + cache_key = _cache.global_cache_key(entity.key) + if use_global_cache and not entity.key.is_partial: + if use_datastore: + lock = yield _cache.global_lock_for_write(cache_key) + else: + expires = context._global_cache_timeout(entity.key, options) + cache_value = entity_pb._pb.SerializeToString() + yield _cache.global_set(cache_key, cache_value, expires=expires) + + if use_datastore: + transaction = context.transaction + if transaction: + batch = _get_commit_batch(transaction, options) + else: + batch = _batch.get_batch(_NonTransactionalCommitBatch, options) + + key_pb = yield batch.put(entity_pb) + if key_pb: + key = helpers.key_from_protobuf(key_pb) + else: + key = None + + if lock: + if transaction: + + def callback(): + _cache.global_unlock_for_write(cache_key, lock).result() + + context.call_on_transaction_complete(callback) + + else: + yield _cache.global_unlock_for_write(cache_key, lock) + + raise tasklets.Return(key) + + +@tasklets.tasklet +def delete(key, options): + """Delete an entity from Datastore. + + Deleting an entity that doesn't exist does not result in an error. The + result is the same regardless. 
+ + Args: + key (datastore.Key): The key for the entity to be deleted. + options (_options.Options): Options for this request. + + Returns: + tasklets.Future: Will be finished when the entity is deleted. Result will + always be :data:`None`. + """ + context = context_module.get_context() + use_global_cache = context._use_global_cache(key, options) + use_datastore = context._use_datastore(key, options) + transaction = context.transaction + + if use_global_cache: + cache_key = _cache.global_cache_key(key) + + if use_datastore: + if use_global_cache: + lock = yield _cache.global_lock_for_write(cache_key) + + if transaction: + batch = _get_commit_batch(transaction, options) + else: + batch = _batch.get_batch(_NonTransactionalCommitBatch, options) + + yield batch.delete(key) + + if use_global_cache: + if transaction: + + def callback(): + _cache.global_unlock_for_write(cache_key, lock).result() + + context.call_on_transaction_complete(callback) + + elif use_datastore: + yield _cache.global_unlock_for_write(cache_key, lock) + + else: + yield _cache.global_delete(cache_key) + + +class _NonTransactionalCommitBatch(object): + """Batch for tracking a set of mutations for a non-transactional commit. + + Attributes: + options (_options.Options): See Args. + mutations (List[datastore_pb2.Mutation]): Sequence of mutation protocol + buffers accumulated for this batch. + futures (List[tasklets.Future]): Sequence of futures for return results + of the commit. The i-th element of ``futures`` corresponds to the + i-th element of ``mutations``. + + Args: + options (_options.Options): The options for the request. Calls with + different options will be placed in different batches. + """ + + def __init__(self, options): + self.options = options + self.mutations = [] + self.futures = [] + + def full(self): + """Indicates whether more work can be added to this batch. + + Returns: + boolean: `True` if number of mutations has reached 500, else + `False`. + """ + return len(self.mutations) >= 500 + + def put(self, entity_pb): + """Add an entity to batch to be stored. + + Args: + entity_pb (datastore_v1.types.Entity): The entity to be stored. + + Returns: + tasklets.Future: Result will be completed datastore key + (entity_pb2.Key) for the entity. + """ + future = tasklets.Future(info="put({})".format(entity_pb)) + mutation = datastore_pb2.Mutation(upsert=entity_pb) + self.mutations.append(mutation) + self.futures.append(future) + return future + + def delete(self, key): + """Add a key to batch to be deleted. + + Args: + key (datastore.Key): The entity's key to be deleted. + + Returns: + tasklets.Future: Result will be :data:`None`, always. + """ + key_pb = key.to_protobuf() + future = tasklets.Future(info="delete({})".format(key_pb)) + mutation = datastore_pb2.Mutation(delete=key_pb) + self.mutations.append(mutation) + self.futures.append(future) + return future + + def idle_callback(self): + """Send the commit for this batch to Datastore.""" + futures = self.futures + + def commit_callback(rpc): + _process_commit(rpc, futures) + + rpc = _datastore_commit( + self.mutations, + None, + retries=self.options.retries, + timeout=self.options.timeout, + ) + rpc.add_done_callback(commit_callback) + + +def prepare_to_commit(transaction): + """Signal that we're ready to commit a transaction. + + Currently just used to signal to the commit batch that we're not going to + need to call `AllocateIds`, because we're ready to commit now. + + Args: + transaction (bytes): The transaction id about to be committed.
+ """ + batch = _get_commit_batch(transaction, _options.Options()) + batch.preparing_to_commit = True + + +def commit(transaction, retries=None, timeout=None): + """Commit a transaction. + + Args: + transaction (bytes): The transaction id to commit. + retries (int): Number of times to potentially retry the call. If + :data:`None` is passed, will use :data:`_retry._DEFAULT_RETRIES`. + If :data:`0` is passed, the call is attempted only once. + timeout (float): Timeout, in seconds, to pass to gRPC call. If + :data:`None` is passed, will use :data:`_DEFAULT_TIMEOUT`. + + Returns: + tasklets.Future: Result will be none, will finish when the transaction + is committed. + """ + batch = _get_commit_batch(transaction, _options.Options()) + return batch.commit(retries=retries, timeout=timeout) + + +def _get_commit_batch(transaction, options): + """Get the commit batch for the current context and transaction. + + Args: + transaction (bytes): The transaction id. Different transactions will + have different batchs. + options (_options.Options): Options for the batch. Not supported at + this time. + + Returns: + _TransactionalCommitBatch: The batch. + """ + # Support for different options will be tricky if we're in a transaction, + # since we can only do one commit, so any options that affect that gRPC + # call would all need to be identical. For now, no options are supported + # here. + for key, value in options.items(): + if key != "transaction" and value: + raise NotImplementedError("Passed bad option: {!r}".format(key)) + + # Since we're in a transaction, we need to hang on to the batch until + # commit time, so we need to store it separately from other batches. + context = context_module.get_context() + batch = context.commit_batches.get(transaction) + if batch is None: + batch = _TransactionalCommitBatch(transaction, options) + context.commit_batches[transaction] = batch + + return batch + + +class _TransactionalCommitBatch(_NonTransactionalCommitBatch): + """Batch for tracking a set of mutations to be committed for a transaction. + + Attributes: + options (_options.Options): See Args. + mutations (List[datastore_pb2.Mutation]): Sequence of mutation protocol + buffers accumumlated for this batch. + futures (List[tasklets.Future]): Sequence of futures for return results + of the commit. The i-th element of ``futures`` corresponds to the + i-th element of ``mutations``. + transaction (bytes): The transaction id of the transaction for this + commit. + allocating_ids (List[tasklets.Future]): Futures for any calls to + AllocateIds that are fired off before commit. + incomplete_mutations (List[datastore_pb2.Mutation]): List of mutations + with keys which will need ids allocated. Incomplete keys will be + allocated by an idle callback. Any keys still incomplete at commit + time will be allocated by the call to Commit. Only used when in a + transaction. + incomplete_futures (List[tasklets.Future]): List of futures + corresponding to keys in ``incomplete_mutations``. Futures will + receive results of id allocation. + + Args: + transaction (bytes): The transaction id of the transaction for this + commit. + options (_options.Options): The options for the request. Calls with + different options will be placed in different batches. 
+ """ + + def __init__(self, transaction, options): + super(_TransactionalCommitBatch, self).__init__(options) + self.transaction = transaction + self.allocating_ids = [] + self.incomplete_mutations = [] + self.incomplete_futures = [] + self.preparing_to_commit = False + + def put(self, entity_pb): + """Add an entity to batch to be stored. + + Args: + entity_pb (datastore_v1.types.Entity): The entity to be stored. + + Returns: + tasklets.Future: Result will be completed datastore key + (entity_pb2.Key) for the entity. + """ + future = tasklets.Future("put({})".format(entity_pb)) + self.futures.append(future) + mutation = datastore_pb2.Mutation(upsert=entity_pb) + self.mutations.append(mutation) + + # If we have an incomplete key, add the incomplete key to a batch for a + # call to AllocateIds, since the call to actually store the entity + # won't happen until the end of the transaction. + if not _complete(entity_pb.key): + # If this is the first key in the batch, we also need to + # schedule our idle handler to get called + if not self.incomplete_mutations: + _eventloop.add_idle(self.idle_callback) + + self.incomplete_mutations.append(mutation) + self.incomplete_futures.append(future) + + # Can't wait for result, since batch won't be sent until transaction + # has ended. Complete keys get passed back None. + else: + future.set_result(None) + + return future + + def delete(self, key): + """Add a key to batch to be deleted. + + Args: + entity_pb (datastore.Key): The entity's key to be deleted. + + Returns: + tasklets.Future: Result will be :data:`None`, always. + """ + # Can't wait for result, since batch won't be sent until transaction + # has ended. + future = super(_TransactionalCommitBatch, self).delete(key) + future.set_result(None) + return future + + def idle_callback(self): + """Call AllocateIds on any incomplete keys in the batch.""" + # If there are no incomplete mutations, or if we're already preparing + # to commit, there's no need to allocate ids. + if self.preparing_to_commit or not self.incomplete_mutations: + return + + # Signal to a future commit that there is an id allocation in + # progress and it should wait. + allocating_ids = tasklets.Future("AllocateIds") + self.allocating_ids.append(allocating_ids) + + mutations = self.incomplete_mutations + futures = self.incomplete_futures + + def callback(rpc): + self.allocate_ids_callback(rpc, mutations, futures) + + # Signal that we're done allocating these ids + allocating_ids.set_result(None) + + keys = [mutation.upsert.key for mutation in mutations] + rpc = _datastore_allocate_ids( + keys, retries=self.options.retries, timeout=self.options.timeout + ) + rpc.add_done_callback(callback) + + self.incomplete_mutations = [] + self.incomplete_futures = [] + + def allocate_ids_callback(self, rpc, mutations, futures): + """Process the results of a call to AllocateIds.""" + # If RPC has resulted in an exception, propagate that exception to + # all waiting futures. + exception = rpc.exception() + if exception is not None: + for future in futures: + future.set_exception(exception) + return + + # Update mutations with complete keys + response = rpc.result() + for mutation, key, future in zip(mutations, response.keys, futures): + mutation.upsert.key._pb.CopyFrom(key._pb) + future.set_result(key) + + @tasklets.tasklet + def commit(self, retries=None, timeout=None): + """Commit transaction. + + Args: + retries (int): Number of times to potentially retry the call. If + :data:`None` is passed, will use + :data:`_retry._DEFAULT_RETRIES`. 
If :data:`0` is passed, the + call is attempted only once. + timeout (float): Timeout, in seconds, to pass to gRPC call. If + :data:`None` is passed, will use :data:`_DEFAULT_TIMEOUT`. + """ + # It's tempting to do something like: + # + # if not self.mutations: + # return + # + # However, even if there are no mutations to save, we still need to + # send a COMMIT to the Datastore. It would appear that failing to do so + # will make subsequent writes hang indefinitely as Datastore apparently + # achieves consistency during a transaction by preventing writes. + + # Wait for any calls to AllocateIds that have been fired off so we + # don't allocate ids again in the commit. + for future in self.allocating_ids: + if not future.done(): + yield future + + future = tasklets.Future("Commit") + futures = self.futures + + def commit_callback(rpc): + _process_commit(rpc, futures) + + exception = rpc.exception() + if exception: + future.set_exception(exception) + else: + future.set_result(None) + + rpc = _datastore_commit( + self.mutations, + transaction=self.transaction, + retries=retries, + timeout=timeout, + ) + rpc.add_done_callback(commit_callback) + + yield future + + +def _process_commit(rpc, futures): + """Process the results of a commit request. + + For each mutation, set the result to the key handed back from + Datastore. If a key wasn't allocated for the mutation, this will be + :data:`None`. + + Args: + rpc (tasklets.Tasklet): If not an exception, the result will be an + instance of + :class:`google.cloud.datastore_v1.datastore_pb2.CommitResponse` + futures (List[tasklets.Future]): List of futures waiting on results. + """ + # If RPC has resulted in an exception, propagate that exception to all + # waiting futures. + exception = rpc.exception() + if exception is not None: + for future in futures: + if not future.done(): + future.set_exception(exception) + return + + # "The i-th mutation result corresponds to the i-th mutation in the + # request." + # + # https://github.com/googleapis/googleapis/blob/master/google/datastore/v1/datastore.proto#L241 + response = rpc.result() + utils.logging_debug(log, response) + + results_futures = zip(response.mutation_results, futures) + for mutation_result, future in results_futures: + if future.done(): + continue + + # Datastore only sends a key if one is allocated for the + # mutation. Confusingly, though, if a key isn't allocated, instead + # of getting None, we get a key with an empty path. + if mutation_result.key.path: + key = mutation_result.key + else: + key = None + future.set_result(key) + + +def _complete(key_pb): + """Determines whether a key protocol buffer is complete. + A new key may be left incomplete so that the id can be allocated by the + database. A key is considered incomplete if the last element of the path + has neither a ``name`` nor an ``id``. + + Args: + key_pb (entity_pb2.Key): The key to check. + + Returns: + boolean: :data:`True` if the key is complete, otherwise :data:`False`. + """ + if key_pb.path: + element = key_pb.path[-1] + if element.id or element.name: + return True + + return False + + +def _datastore_commit(mutations, transaction, retries=None, timeout=None, metadata=()): + """Call Commit on Datastore. + + Args: + mutations (List[datastore_pb2.Mutation]): The changes to persist to + Datastore. + transaction (Union[bytes, NoneType]): The identifier for the + transaction for this commit, or :data:`None` if no transaction is + being used. + retries (int): Number of times to potentially retry the call.
If + :data:`None` is passed, will use :data:`_retry._DEFAULT_RETRIES`. + If :data:`0` is passed, the call is attempted only once. + timeout (float): Timeout, in seconds, to pass to gRPC call. If + :data:`None` is passed, will use :data:`_DEFAULT_TIMEOUT`. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + tasklets.Tasklet: A future for + :class:`google.cloud.datastore_v1.datastore_pb2.CommitResponse` + """ + if transaction is None: + mode = datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL + else: + mode = datastore_pb2.CommitRequest.Mode.TRANSACTIONAL + + client = context_module.get_context().client + request = datastore_pb2.CommitRequest( + project_id=client.project, + database_id=client.database, + mode=mode, + mutations=mutations, + transaction=transaction, + ) + metadata = _add_routing_info(metadata, request) + + return make_call( + "commit", request, retries=retries, timeout=timeout, metadata=metadata + ) + + +def allocate(keys, options): + """Allocate ids for incomplete keys. + + Args: + keys (List[key.Key]): The incomplete keys. + options (_options.Options): The options for the request. + + Returns: + tasklets.Future: A future for the keys completed with allocated ids. + """ + futures = [] + while keys: + batch = _batch.get_batch(_AllocateIdsBatch, options) + room_left = batch.room_left() + batch_keys = keys[:room_left] + futures.extend(batch.add(batch_keys)) + keys = keys[room_left:] + + return tasklets._MultiFuture(futures) + + +class _AllocateIdsBatch(object): + """Batch for AllocateIds requests. + + Not related to the batch used by transactions to allocate ids for upserts + before committing, although they do both eventually call + ``_datastore_allocate_ids``. + + Args: + options (_options.Options): The options for the request. Calls with + different options will be placed in different batches. + """ + + def __init__(self, options): + self.options = options + self.keys = [] + self.futures = [] + + def full(self): + """Indicates whether more work can be added to this batch. + + Returns: + boolean: `True` if number of keys has reached 500, else `False`. + """ + return len(self.keys) >= 500 + + def room_left(self): + """Get how many more keys can be added to this batch. + + Returns: + int: 500 - number of keys already in batch + """ + return 500 - len(self.keys) + + def add(self, keys): + """Add incomplete keys to batch to allocate. + + Args: + keys (List[datastore.Key]): Allocate ids for these keys. + + Returns: + List[tasklets.Future]: Futures for the eventual keys, completed with + allocated ids. + """ + futures = [] + for key in keys: + future = tasklets.Future(info="AllocateIds({})".format(key)) + futures.append(future) + self.keys.append(key) + + self.futures.extend(futures) + return futures + + def idle_callback(self): + """Perform a Datastore AllocateIds request on all batched keys.""" + key_pbs = [key.to_protobuf() for key in self.keys] + rpc = _datastore_allocate_ids( + key_pbs, retries=self.options.retries, timeout=self.options.timeout + ) + rpc.add_done_callback(self.allocate_ids_callback) + + def allocate_ids_callback(self, rpc): + """Process the results of a call to AllocateIds.""" + # If RPC has resulted in an exception, propagate that exception to all + # waiting futures.
+ exception = rpc.exception() + if exception is not None: + for future in self.futures: + future.set_exception(exception) + return + + for key, future in zip(rpc.result().keys, self.futures): + future.set_result(key) + + +def _datastore_allocate_ids(keys, retries=None, timeout=None, metadata=()): + """Calls ``AllocateIds`` on Datastore. + + Args: + keys (List[google.cloud.datastore_v1.entity_pb2.Key]): List of + incomplete keys to allocate. + retries (int): Number of times to potentially retry the call. If + :data:`None` is passed, will use :data:`_retry._DEFAULT_RETRIES`. + If :data:`0` is passed, the call is attempted only once. + timeout (float): Timeout, in seconds, to pass to gRPC call. If + :data:`None` is passed, will use :data:`_DEFAULT_TIMEOUT`. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + tasklets.Future: A future for + :class:`google.cloud.datastore_v1.datastore_pb2.AllocateIdsResponse` + """ + client = context_module.get_context().client + request = datastore_pb2.AllocateIdsRequest( + project_id=client.project, database_id=client.database, keys=keys + ) + metadata = _add_routing_info(metadata, request) + + return make_call( + "allocate_ids", request, retries=retries, timeout=timeout, metadata=metadata + ) + + +@tasklets.tasklet +def begin_transaction(read_only, retries=None, timeout=None): + """Start a new transaction. + + Args: + read_only (bool): Whether to start a read-only or read-write + transaction. + retries (int): Number of times to potentially retry the call. If + :data:`None` is passed, will use :data:`_retry._DEFAULT_RETRIES`. + If :data:`0` is passed, the call is attempted only once. + timeout (float): Timeout, in seconds, to pass to gRPC call. If + :data:`None` is passed, will use :data:`_DEFAULT_TIMEOUT`. + + Returns: + tasklets.Future: Result will be Transaction Id (bytes) of new + transaction. + """ + response = yield _datastore_begin_transaction( + read_only, retries=retries, timeout=timeout + ) + raise tasklets.Return(response.transaction) + + +def _datastore_begin_transaction(read_only, retries=None, timeout=None, metadata=()): + """Calls ``BeginTransaction`` on Datastore. + + Args: + read_only (bool): Whether to start a read-only or read-write + transaction. + retries (int): Number of times to potentially retry the call. If + :data:`None` is passed, will use :data:`_retry._DEFAULT_RETRIES`. + If :data:`0` is passed, the call is attempted only once. + timeout (float): Timeout, in seconds, to pass to gRPC call. If + :data:`None` is passed, will use :data:`_DEFAULT_TIMEOUT`. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + tasklets.Tasklet: A future for + :class:`google.cloud.datastore_v1.datastore_pb2.BeginTransactionResponse` + """ + client = context_module.get_context().client + if read_only: + options = datastore_pb2.TransactionOptions( + read_only=datastore_pb2.TransactionOptions.ReadOnly() + ) + else: + options = datastore_pb2.TransactionOptions( + read_write=datastore_pb2.TransactionOptions.ReadWrite() + ) + + request = datastore_pb2.BeginTransactionRequest( + project_id=client.project, + database_id=client.database, + transaction_options=options, + ) + metadata = _add_routing_info(metadata, request) + + return make_call( + "begin_transaction", + request, + retries=retries, + timeout=timeout, + metadata=metadata, + ) + + +@tasklets.tasklet +def rollback(transaction, retries=None, timeout=None): + """Rollback a transaction. + + Args: + transaction (bytes): Transaction id. + retries (int): Number of times to potentially retry the call. If + :data:`None` is passed, will use :data:`_retry._DEFAULT_RETRIES`. + If :data:`0` is passed, the call is attempted only once. + timeout (float): Timeout, in seconds, to pass to gRPC call. If + :data:`None` is passed, will use :data:`_DEFAULT_TIMEOUT`. + + Returns: + tasklets.Future: Future completes when rollback is finished. + """ + yield _datastore_rollback(transaction, retries=retries, timeout=timeout) + + +def _datastore_rollback(transaction, retries=None, timeout=None, metadata=()): + """Calls Rollback in Datastore. + + Args: + transaction (bytes): Transaction id. + retries (int): Number of times to potentially retry the call. If + :data:`None` is passed, will use :data:`_retry._DEFAULT_RETRIES`. + If :data:`0` is passed, the call is attempted only once. + timeout (float): Timeout, in seconds, to pass to gRPC call. If + :data:`None` is passed, will use :data:`_DEFAULT_TIMEOUT`. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + tasklets.Tasklet: Future for + :class:`google.cloud.datastore_v1.datastore_pb2.RollbackResponse` + """ + client = context_module.get_context().client + request = datastore_pb2.RollbackRequest( + project_id=client.project, + database_id=client.database, + transaction=transaction, + ) + metadata = _add_routing_info(metadata, request) + + return make_call( + "rollback", request, retries=retries, timeout=timeout, metadata=metadata + ) + + +def _add_routing_info(metadata, request): + """Adds routing header info to the given metadata. + + Args: + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. Not modified. + request (Any): An appropriate request object for the call, eg, + `entity_pb2.LookupRequest` for calling ``Lookup``. + + Returns: + Sequence[Tuple[str, str]]: Sequence with routing info added, + if it is included in the request. 
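+ + For example, with placeholder project and database ids, the returned metadata would look roughly like this (an illustrative sketch of the routing header format):: + + _add_routing_info((), request) + # (("x-goog-request-params", "project_id=my-project&database_id=my-db"),)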
+ """ + header_params = {} + + if request.project_id: + header_params["project_id"] = request.project_id + + if request.database_id: + header_params["database_id"] = request.database_id + + if header_params: + return tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) + + return tuple(metadata) diff --git a/packages/google-cloud-ndb/google/cloud/ndb/_datastore_query.py b/packages/google-cloud-ndb/google/cloud/ndb/_datastore_query.py new file mode 100644 index 000000000000..72a9f8a3f761 --- /dev/null +++ b/packages/google-cloud-ndb/google/cloud/ndb/_datastore_query.py @@ -0,0 +1,1088 @@ +# -*- coding: utf-8 -*- +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Translate NDB queries to Datastore calls.""" + +import base64 +import functools +import logging +import os + +from google.cloud import environment_vars + +from google.cloud.datastore_v1.types import datastore as datastore_pb2 +from google.cloud.datastore_v1.types import entity as entity_pb2 +from google.cloud.datastore_v1.types import query as query_pb2 +from google.cloud.datastore import helpers, Key + +from google.cloud.ndb import context as context_module +from google.cloud.ndb import _datastore_api +from google.cloud.ndb import exceptions +from google.cloud.ndb import key as key_module +from google.cloud.ndb import model +from google.cloud.ndb import tasklets +from google.cloud.ndb import utils + +log = logging.getLogger(__name__) + +MoreResultsType = query_pb2.QueryResultBatch.MoreResultsType +NO_MORE_RESULTS = MoreResultsType.NO_MORE_RESULTS +NOT_FINISHED = MoreResultsType.NOT_FINISHED +MORE_RESULTS_AFTER_LIMIT = MoreResultsType.MORE_RESULTS_AFTER_LIMIT + +ResultType = query_pb2.EntityResult.ResultType +RESULT_TYPE_FULL = ResultType.FULL +RESULT_TYPE_KEY_ONLY = ResultType.KEY_ONLY +RESULT_TYPE_PROJECTION = ResultType.PROJECTION + +DOWN = query_pb2.PropertyOrder.Direction.DESCENDING +UP = query_pb2.PropertyOrder.Direction.ASCENDING + +FILTER_OPERATORS = { + "=": query_pb2.PropertyFilter.Operator.EQUAL, + "<": query_pb2.PropertyFilter.Operator.LESS_THAN, + "<=": query_pb2.PropertyFilter.Operator.LESS_THAN_OR_EQUAL, + ">": query_pb2.PropertyFilter.Operator.GREATER_THAN, + ">=": query_pb2.PropertyFilter.Operator.GREATER_THAN_OR_EQUAL, + "!=": query_pb2.PropertyFilter.Operator.NOT_EQUAL, + "in": query_pb2.PropertyFilter.Operator.IN, + "not_in": query_pb2.PropertyFilter.Operator.NOT_IN, +} + +_KEY_NOT_IN_CACHE = object() + + +def make_filter(name, op, value): + """Make a property filter protocol buffer. + + Args: + name (str): The name of the property to filter by. + op (str): The operator to apply in the filter. Must be one of "=", "<", + "<=", ">", or ">=". + value (Any): The value for comparison. + + Returns: + query_pb2.PropertyFilter: The filter protocol buffer. 
+ """ + filter_pb = query_pb2.PropertyFilter( + property=query_pb2.PropertyReference(name=name), + op=FILTER_OPERATORS[op], + ) + helpers._set_protobuf_value(filter_pb.value._pb, value) + return filter_pb + + +def make_composite_and_filter(filter_pbs): + """Make a composite filter protocol buffer using AND. + + Args: + List[Union[query_pb2.PropertyFilter, query_pb2.CompositeFilter]]: The + list of filters to be combined. + + Returns: + query_pb2.CompositeFilter: The new composite filter. + """ + return query_pb2.CompositeFilter( + op=query_pb2.CompositeFilter.Operator.AND, + filters=[_filter_pb(filter_pb) for filter_pb in filter_pbs], + ) + + +@tasklets.tasklet +def fetch(query): + """Fetch query results. + + Args: + query (query.QueryOptions): The query spec. + + Returns: + tasklets.Future: Result is List[Union[model.Model, key.Key]]: The query + results. + """ + results = iterate(query) + entities = [] + while (yield results.has_next_async()): + entities.append(results.next()) + + raise tasklets.Return(entities) + + +def count(query): + """Count query results. + + Args: + query (query.QueryOptions): The query spec. + + Returns: + tasklets.Future: Results is int: Number of results that would be + returned by the query. + """ + filters = query.filters + if filters: + if filters._multiquery or filters._post_filters(): + return _count_brute_force(query) + if bool(os.environ.get(environment_vars.GCD_HOST)): + # The Datastore emulator has some differences from Datastore that would + # break _count_by_skipping. + # - it will never set more_results to NO_MORE_RESULTS + # - it won't set end_cursor to something useful if no results are returned + return _count_brute_force(query) + return _count_by_skipping(query) + + +@tasklets.tasklet +def _count_brute_force(query): + query = query.copy(projection=["__key__"], order_by=None) + results = iterate(query, raw=True) + count = 0 + limit = query.limit + while (yield results.has_next_async()): + count += 1 + if limit and count == limit: + break + + results.next() + + raise tasklets.Return(count) + + +@tasklets.tasklet +def _count_by_skipping(query): + limit = query.limit + query = query.copy(projection=["__key__"], order_by=None, limit=1) + count = 0 + more_results = NOT_FINISHED + cursor = None + + while more_results != NO_MORE_RESULTS: + if limit: + offset = limit - count - 1 + else: + offset = 10000 + + query = query.copy(offset=offset, start_cursor=cursor) + response = yield _datastore_run_query(query) + batch = response.batch + + count += batch.skipped_results + len(batch.entity_results) + if limit and count >= limit: + break + + cursor = Cursor(batch.end_cursor) + + more_results = batch.more_results + + raise tasklets.Return(count) + + +def iterate(query, raw=False): + """Get iterator for query results. + + Args: + query (query.QueryOptions): The query spec. + + Returns: + QueryIterator: The iterator. + """ + filters = query.filters + if filters: + if filters._multiquery: + return _MultiQueryIteratorImpl(query, raw=raw) + + post_filters = filters._post_filters() + if post_filters: + predicate = post_filters._to_filter(post=True) + return _PostFilterQueryIteratorImpl(query, predicate, raw=raw) + + return _QueryIteratorImpl(query, raw=raw) + + +class QueryIterator(object): + """An iterator for query results. + + Executes the given query and provides an interface for iterating over + instances of either :class:`model.Model` or :class:`key.Key` depending on + whether ``keys_only`` was specified for the query. + + This is an abstract base class. 
Users should not instantiate an iterator + class directly. Use :meth:`query.Query.iter` or ``iter(query)`` to get an + instance of :class:`QueryIterator`. + """ + + def __iter__(self): + return self + + def has_next(self): + """Is there at least one more result? + + Blocks until the answer to this question is known and buffers the + result (if any) until retrieved with :meth:`next`. + + Returns: + bool: :data:`True` if a subsequent call to + :meth:`QueryIterator.next` will return a result, otherwise + :data:`False`. + """ + raise NotImplementedError() + + def has_next_async(self): + """Asynchronous version of :meth:`has_next`. + + Returns: + tasklets.Future: See :meth:`has_next`. + """ + raise NotImplementedError() + + def probably_has_next(self): + """Like :meth:`has_next` but won't block. + + This uses a (sometimes inaccurate) shortcut to avoid having to hit the + Datastore for the answer. + + May return a false positive (:data:`True` when :meth:`next` would + actually raise ``StopIteration``), but never a false negative + (:data:`False` when :meth:`next` would actually return a result). + """ + raise NotImplementedError() + + def next(self): + """Get the next result. + + May block. Guaranteed not to block if immediately following a call to + :meth:`has_next` or :meth:`has_next_async` which will buffer the next + result. + + Returns: + Union[model.Model, key.Key]: Depending on if ``keys_only=True`` was + passed in as an option. + """ + raise NotImplementedError() + + def cursor_before(self): + """Get a cursor to the point just before the last result returned. + + Returns: + Cursor: The cursor. + + Raises: + exceptions.BadArgumentError: If there is no cursor to return. This + will happen if the iterator hasn't returned a result yet, has + only returned a single result so far, or if the iterator has + been exhausted. Also, if query uses ``OR``, ``!=``, or ``IN``, + since those are composites of multiple Datastore queries each + with their own cursors—it is impossible to return a cursor for + the composite query. + """ + raise NotImplementedError() + + def cursor_after(self): + """Get a cursor to the point just after the last result returned. + + Returns: + Cursor: The cursor. + + Raises: + exceptions.BadArgumentError: If there is no cursor to return. This + will happen if the iterator hasn't returned a result yet. Also, + if query uses ``OR``, ``!=``, or ``IN``, since those are + composites of multiple Datastore queries each with their own + cursors—it is impossible to return a cursor for the composite + query. + """ + raise NotImplementedError() + + def index_list(self): + """Return a list of indexes used by the query. + + Raises: + NotImplementedError: Always. This information is no longer + available from query results in Datastore. + """ + raise exceptions.NoLongerImplementedError() + + +class _QueryIteratorImpl(QueryIterator): + """Implementation of :class:`QueryIterator` for single Datastore queries. + + Args: + query (query.QueryOptions): The query spec. + raw (bool): Whether or not to marshall NDB entities or keys for query + results or return internal representations (:class:`_Result`). For + internal use only. 
+ """ + + def __init__(self, query, raw=False): + self._query = query + self._batch = None + self._index = None + self._has_next_batch = None + self._cursor_before = None + self._cursor_after = None + self._raw = raw + + def has_next(self): + """Implements :meth:`QueryIterator.has_next`.""" + return self.has_next_async().result() + + @tasklets.tasklet + def has_next_async(self): + """Implements :meth:`QueryIterator.has_next_async`.""" + if self._batch is None: + yield self._next_batch() # First time + + if self._index < len(self._batch): + raise tasklets.Return(True) + + while self._has_next_batch: + # Firestore will sometimes send us empty batches when there are + # still more results to go. This `while` loop skips those. + yield self._next_batch() + if self._batch: + raise tasklets.Return(self._index < len(self._batch)) + + raise tasklets.Return(False) + + def probably_has_next(self): + """Implements :meth:`QueryIterator.probably_has_next`.""" + return ( + self._batch is None # Haven't even started yet + or self._has_next_batch # There's another batch to fetch + or self._index < len(self._batch) # Not done with current batch + ) + + @tasklets.tasklet + def _next_batch(self): + """Get the next batch from Datastore. + + If this batch isn't the last batch for the query, update the internal + query spec with a cursor pointing to the next batch. + """ + query = self._query + response = yield _datastore_run_query(query) + + batch = response.batch + result_type = batch.entity_result_type + + self._start_cursor = query.start_cursor + self._index = 0 + self._batch = [ + _Result(result_type, result_pb, query.order_by, query_options=query) + for result_pb in response.batch.entity_results + ] + + if result_type == RESULT_TYPE_FULL: + # If we cached a delete, remove it from the result set. This may come cause + # some queries to return less than their limit even if there are more + # results. As far as I can tell, that was also a possibility with the legacy + # version. + context = context_module.get_context() + self._batch = [ + result + for result in self._batch + if result.check_cache(context) is not None + ] + + self._has_next_batch = more_results = batch.more_results == NOT_FINISHED + + self._more_results_after_limit = batch.more_results == MORE_RESULTS_AFTER_LIMIT + + if more_results: + # Fix up query for next batch + limit = self._query.limit + if limit is not None: + limit -= len(self._batch) + + offset = self._query.offset + if offset: + offset -= response.batch.skipped_results + + self._query = self._query.copy( + start_cursor=Cursor(batch.end_cursor), + offset=offset, + limit=limit, + ) + + def next(self): + """Implements :meth:`QueryIterator.next`.""" + # May block + if not self.has_next(): + self._cursor_before = None + raise StopIteration + + # Won't block + next_result = self._batch[self._index] + self._index += 1 + + # Adjust cursors + self._cursor_before = self._cursor_after + self._cursor_after = next_result.cursor + + if not self._raw: + next_result = next_result.entity() + + return next_result + + def _peek(self): + """Get the current, buffered result without advancing the iterator. + + Returns: + _Result: The current result. + + Raises: + KeyError: If there's no current, buffered result. 
+ """ + batch = self._batch + index = self._index + + if batch and index < len(batch): + return batch[index] + + raise KeyError(index) + + __next__ = next + + def cursor_before(self): + """Implements :meth:`QueryIterator.cursor_before`.""" + if self._cursor_before is None: + raise exceptions.BadArgumentError("There is no cursor currently") + + return self._cursor_before + + def cursor_after(self): + """Implements :meth:`QueryIterator.cursor_after.""" + if self._cursor_after is None: + raise exceptions.BadArgumentError("There is no cursor currently") + + return self._cursor_after + + +class _PostFilterQueryIteratorImpl(QueryIterator): + """Iterator for query with post filters. + + A post-filter is a filter that can't be executed server side in Datastore + and therefore must be handled in memory on the client side. This iterator + allows a predicate representing one or more post filters to be applied to + query results, returning only those results which satisfy the condition(s) + enforced by the predicate. + + Args: + query (query.QueryOptions): The query spec. + predicate (Callable[[entity_pb2.Entity], bool]): Predicate from post + filter(s) to be applied. Only entity results for which this + predicate returns :data:`True` will be returned. + raw (bool): Whether or not to marshall NDB entities or keys for query + results or return internal representations (:class:`_Result`). For + internal use only. + """ + + def __init__(self, query, predicate, raw=False): + self._result_set = _QueryIteratorImpl( + query.copy(offset=None, limit=None), raw=True + ) + self._predicate = predicate + self._next_result = None + self._offset = query.offset + self._limit = query.limit + self._cursor_before = None + self._cursor_after = None + self._raw = raw + + def has_next(self): + """Implements :meth:`QueryIterator.has_next`.""" + return self.has_next_async().result() + + @tasklets.tasklet + def has_next_async(self): + """Implements :meth:`QueryIterator.has_next_async`.""" + if self._next_result: + raise tasklets.Return(True) + + if self._limit == 0: + raise tasklets.Return(False) + + # Actually get the next result and load it into memory, or else we + # can't really know + while True: + has_next = yield self._result_set.has_next_async() + if not has_next: + raise tasklets.Return(False) + + next_result = self._result_set.next() + + if not self._predicate(next_result.result_pb.entity): + # Doesn't sastisfy predicate, skip + continue + + # Satisfies predicate + + # Offset? + if self._offset: + self._offset -= 1 + continue + + # Limit? 
+            if self._limit:
+                self._limit -= 1
+
+            self._next_result = next_result
+
+            # Adjust cursors
+            self._cursor_before = self._cursor_after
+            self._cursor_after = next_result.cursor
+
+            raise tasklets.Return(True)
+
+    def probably_has_next(self):
+        """Implements :meth:`QueryIterator.probably_has_next`."""
+        return bool(self._next_result) or self._result_set.probably_has_next()
+
+    def next(self):
+        """Implements :meth:`QueryIterator.next`."""
+        # Might block
+        if not self.has_next():
+            raise StopIteration()
+
+        # Won't block
+        next_result = self._next_result
+        self._next_result = None
+        if self._raw:
+            return next_result
+        else:
+            return next_result.entity()
+
+    __next__ = next
+
+    def cursor_before(self):
+        """Implements :meth:`QueryIterator.cursor_before`."""
+        if self._cursor_before is None:
+            raise exceptions.BadArgumentError("There is no cursor currently")
+
+        return self._cursor_before
+
+    def cursor_after(self):
+        """Implements :meth:`QueryIterator.cursor_after`."""
+        if self._cursor_after is None:
+            raise exceptions.BadArgumentError("There is no cursor currently")
+
+        return self._cursor_after
+
+    @property
+    def _more_results_after_limit(self):
+        return self._result_set._more_results_after_limit
+
+
+class _MultiQueryIteratorImpl(QueryIterator):
+    """Multiple Query Iterator
+
+    Some queries that in NDB are logically a single query have to be broken
+    up into two or more Datastore queries, because Datastore doesn't have a
+    composite filter with a boolean OR. This iterator merges two or more query
+    result sets. If the results are ordered, it merges results in sort order,
+    otherwise it simply chains result sets together. In either case, it removes
+    any duplicates so that entities that appear in more than one result set
+    only appear once in the merged set.
+
+    Args:
+        query (query.QueryOptions): The query spec.
+        raw (bool): Whether to marshal NDB entities or keys for query
+            results, or to return internal representations (:class:`_Result`).
+            For internal use only.
+    """
+
+    _extra_projections = None
+    _coerce_keys_only = False
+
+    def __init__(self, query, raw=False):
+        projection = query.projection
+        if query.order_by and projection:
+            # In an ordered multiquery, result sets have to be merged in order
+            # by this iterator, so if there's a projection we may need to add a
+            # property or two to underlying Datastore queries to make sure we
+            # have the data needed for sorting.
+ projection = list(projection) + extra_projections = [] + for order in query.order_by: + if order.name not in projection: + extra_projections.append(order.name) + + if extra_projections: + if projection == ["__key__"]: + self._coerce_keys_only = True + projection.extend(extra_projections) + self._extra_projections = extra_projections + + queries = [ + query.copy(filters=node, projection=projection, offset=None, limit=None) + for node in query.filters._nodes + ] + self._result_sets = [iterate(_query, raw=True) for _query in queries] + self._sortable = bool(query.order_by) + self._seen_keys = set() + self._next_result = None + + self._offset = query.offset + self._limit = query.limit + self._raw = raw + + def has_next(self): + """Implements :meth:`QueryIterator.has_next`.""" + return self.has_next_async().result() + + @tasklets.tasklet + def has_next_async(self): + """Implements :meth:`QueryIterator.has_next_async`.""" + if self._next_result: + raise tasklets.Return(True) + + if not self._result_sets: + raise tasklets.Return(False) + + if self._limit == 0: + raise tasklets.Return(False) + + # Actually get the next result and load it into memory, or else we + # can't really know + while True: + has_nexts = yield [ + result_set.has_next_async() for result_set in self._result_sets + ] + + self._result_sets = result_sets = [ + result_set + for i, result_set in enumerate(self._result_sets) + if has_nexts[i] + ] + + if not result_sets: + raise tasklets.Return(False) + + # If sorting, peek at the next values from all result sets and take + # the minimum. + if self._sortable: + min_index, min_value = 0, result_sets[0]._peek() + for i, result_set in enumerate(result_sets[1:], 1): + value = result_sets[i]._peek() + if value < min_value: + min_value = value + min_index = i + + next_result = result_sets[min_index].next() + + # If not sorting, take the next result from the first result set. + # Will exhaust each result set in turn. + else: + next_result = result_sets[0].next() + + # Check to see if it's a duplicate + hash_key = next_result.result_pb.entity.key._pb.SerializeToString() + if hash_key in self._seen_keys: + continue + + # Not a duplicate + self._seen_keys.add(hash_key) + + # Offset? + if self._offset: + self._offset -= 1 + continue + + # Limit? 
+            if self._limit:
+                self._limit -= 1
+
+            self._next_result = next_result
+
+            raise tasklets.Return(True)
+
+    def probably_has_next(self):
+        """Implements :meth:`QueryIterator.probably_has_next`."""
+        return bool(self._next_result) or any(
+            [result_set.probably_has_next() for result_set in self._result_sets]
+        )
+
+    def next(self):
+        """Implements :meth:`QueryIterator.next`."""
+        # Might block
+        if not self.has_next():
+            raise StopIteration()
+
+        # Won't block
+        next_result = self._next_result
+        self._next_result = None
+
+        # If we had to set extra properties in the projection, elide them now
+        if self._extra_projections:
+            properties = next_result.result_pb.entity.properties
+            for name in self._extra_projections:
+                if name in properties:
+                    del properties[name]
+
+        if self._raw:
+            return next_result
+        else:
+            entity = next_result.entity()
+            if self._coerce_keys_only:
+                return entity._key
+            return entity
+
+    __next__ = next
+
+    def cursor_before(self):
+        """Implements :meth:`QueryIterator.cursor_before`."""
+        raise exceptions.BadArgumentError("Can't have cursors with OR filter")
+
+    def cursor_after(self):
+        """Implements :meth:`QueryIterator.cursor_after`."""
+        raise exceptions.BadArgumentError("Can't have cursors with OR filter")
+
+
+@functools.total_ordering
+class _Result(object):
+    """A single, sortable query result.
+
+    Args:
+        result_type (query_pb2.EntityResult.ResultType): The type of result.
+        result_pb (query_pb2.EntityResult): Protocol buffer result.
+        order_by (Optional[Sequence[query.PropertyOrder]]): Ordering for the
+            query. Used to merge sorted result sets while maintaining sort
+            order.
+        query_options (Optional[QueryOptions]): Other query options;
+            use_cache is the only supported option.
+    """
+
+    _key = None
+
+    def __init__(self, result_type, result_pb, order_by=None, query_options=None):
+        self.result_type = result_type
+        self.result_pb = result_pb
+        self.order_by = order_by
+
+        self.cursor = Cursor(result_pb.cursor)
+
+        self._query_options = query_options
+
+    def __lt__(self, other):
+        """For total ordering."""
+        return self._compare(other) == -1
+
+    def __eq__(self, other):
+        """For total ordering."""
+        if isinstance(other, _Result) and self.result_pb == other.result_pb:
+            return True
+
+        return self._compare(other) == 0
+
+    def _compare(self, other):
+        """Compare this result to another result for sorting.
+
+        Args:
+            other (_Result): The other result to compare to.
+
+        Returns:
+            int: :data:`-1` if this result should come before `other`,
+                :data:`0` if this result is equivalent to `other` for sorting
+                purposes, or :data:`1` if this result should come after
+                `other`. Returns ``NotImplemented`` if `other` is not a
+                `_Result`.
+
+        Raises:
+            NotImplementedError: If `order_by` was not passed to the
+                constructor or is :data:`None` or empty.
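+
+        Example (sketch): with ``order_by=[PropertyOrder("age")]``, a result
+        whose ``age`` value is 30 sorts before one whose ``age`` is 40, so
+        ``_compare`` returns -1; with ``PropertyOrder("age", reverse=True)``
+        the same comparison returns 1.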
+ """ + if not self.order_by: + raise NotImplementedError("Can't sort result set without order_by") + + if not isinstance(other, _Result): + return NotImplemented + + for order in self.order_by: + if order.name == "__key__": + this_value = helpers.key_from_protobuf( + self.result_pb.entity.key + ).flat_path + other_value = helpers.key_from_protobuf( + other.result_pb.entity.key + ).flat_path + else: + this_value_pb = self.result_pb.entity.properties[order.name] + this_value = helpers._get_value_from_value_pb(this_value_pb._pb) + other_value_pb = other.result_pb.entity.properties[order.name] + other_value = helpers._get_value_from_value_pb(other_value_pb._pb) + + # Compare key paths if ordering by key property + if isinstance(this_value, Key): + this_value = this_value.flat_path + + if isinstance(other_value, Key): + other_value = other_value.flat_path + + direction = -1 if order.reverse else 1 + + if this_value < other_value: + return -direction + + elif this_value > other_value: + return direction + + return 0 + + def key(self): + """Construct the key for this result. + + Returns: + key.Key: The key. + """ + if self._key is None: + key_pb = self.result_pb.entity.key + ds_key = helpers.key_from_protobuf(key_pb) + self._key = key_module.Key._from_ds_key(ds_key) + + return self._key + + def check_cache(self, context): + """Check local context cache for entity. + + Returns: + Any: The NDB entity for this result, if it is cached, otherwise + `_KEY_NOT_IN_CACHE`. May also return `None` if entity was deleted which + will cause `None` to be recorded in the cache. + """ + key = self.key() + if context._use_cache(key, self._query_options): + try: + return context.cache.get_and_validate(key) + except KeyError: + pass + + return _KEY_NOT_IN_CACHE + + def entity(self): + """Get an entity for an entity result. Use or update the cache if available. + + Args: + projection (Optional[Sequence[str]]): Sequence of property names to + be projected in the query results. + + Returns: + Union[model.Model, key.Key]: The processed result. + """ + + if self.result_type == RESULT_TYPE_FULL: + # First check the cache. + context = context_module.get_context() + entity = self.check_cache(context) + if entity is _KEY_NOT_IN_CACHE: + # entity not in cache, create one, and then add it to cache + entity = model._entity_from_protobuf(self.result_pb.entity) + if context._use_cache(entity.key, self._query_options): + context.cache[entity.key] = entity + return entity + + elif self.result_type == RESULT_TYPE_PROJECTION: + entity = model._entity_from_protobuf(self.result_pb.entity) + projection = tuple(self.result_pb.entity.properties.keys()) + entity._set_projection(projection) + return entity + + elif self.result_type == RESULT_TYPE_KEY_ONLY: + return self.key() + + raise NotImplementedError("Got unexpected entity result type for query.") + + +def _query_to_protobuf(query): + """Convert an NDB query to a Datastore protocol buffer. + + Args: + query (query.QueryOptions): The query spec. + + Returns: + query_pb2.Query: The protocol buffer representation of the query. 
+ """ + query_args = {} + if query.kind: + query_args["kind"] = [query_pb2.KindExpression(name=query.kind)] + + if query.projection: + query_args["projection"] = [ + query_pb2.Projection(property=query_pb2.PropertyReference(name=name)) + for name in query.projection + ] + + if query.distinct_on: + query_args["distinct_on"] = [ + query_pb2.PropertyReference(name=name) for name in query.distinct_on + ] + + if query.order_by: + query_args["order"] = [ + query_pb2.PropertyOrder( + property=query_pb2.PropertyReference(name=order.name), + direction=DOWN if order.reverse else UP, + ) + for order in query.order_by + ] + + filter_pb = query.filters._to_filter() if query.filters else None + + if query.ancestor: + ancestor_pb = query.ancestor._key.to_protobuf() + ancestor_filter_pb = query_pb2.PropertyFilter( + property=query_pb2.PropertyReference(name="__key__"), + op=query_pb2.PropertyFilter.Operator.HAS_ANCESTOR, + ) + ancestor_filter_pb.value.key_value._pb.CopyFrom(ancestor_pb._pb) + + if filter_pb is None: + filter_pb = ancestor_filter_pb + + elif isinstance(filter_pb, query_pb2.CompositeFilter): + filter_pb.filters._pb.add(property_filter=ancestor_filter_pb._pb) + + else: + filter_pb = query_pb2.CompositeFilter( + op=query_pb2.CompositeFilter.Operator.AND, + filters=[ + _filter_pb(filter_pb), + _filter_pb(ancestor_filter_pb), + ], + ) + + if filter_pb is not None: + query_args["filter"] = _filter_pb(filter_pb) + + if query.start_cursor: + query_args["start_cursor"] = query.start_cursor.cursor + + if query.end_cursor: + query_args["end_cursor"] = query.end_cursor.cursor + + query_pb = query_pb2.Query(**query_args) + + if query.offset: + query_pb.offset = query.offset + + if query.limit: + query_pb._pb.limit.value = query.limit + + return query_pb + + +def _filter_pb(filter_pb): + """Convenience function to compose a filter protocol buffer. + + The Datastore protocol uses a Filter message which has one of either a + PropertyFilter or CompositeFilter as a sole attribute. + + Args: + filter_pb (Union[query_pb2.CompositeFilter, query_pb2.PropertyFilter]): + The actual filter. + + Returns: + query_pb2.Filter: The filter at the higher level of abstraction + required to use it in a query. + """ + if isinstance(filter_pb, query_pb2.CompositeFilter): + return query_pb2.Filter(composite_filter=filter_pb) + + return query_pb2.Filter(property_filter=filter_pb) + + +@tasklets.tasklet +def _datastore_run_query(query): + """Run a query in Datastore. + + Args: + query (query.QueryOptions): The query spec. + + Returns: + tasklets.Future: + """ + query_pb = _query_to_protobuf(query) + partition_id = entity_pb2.PartitionId( + project_id=query.project, + database_id=query.database, + namespace_id=query.namespace, + ) + read_options = _datastore_api.get_read_options(query) + request = datastore_pb2.RunQueryRequest( + project_id=query.project, + database_id=query.database, + partition_id=partition_id, + query=query_pb, + read_options=read_options, + ) + metadata = _datastore_api._add_routing_info((), request) + + response = yield _datastore_api.make_call( + "run_query", request, timeout=query.timeout, metadata=metadata + ) + utils.logging_debug(log, response) + raise tasklets.Return(response) + + +class Cursor(object): + """Cursor. + + A pointer to a place in a sequence of query results. Cursor itself is just + a byte sequence passed back by Datastore. This class wraps that with + methods to convert to/from a URL safe string. 
+
+    The API for converting to/from a URL-safe string differs depending on
+    whether you're reading the Legacy NDB docstrings or the official Legacy
+    NDB documentation on the web. Both spellings are supported here.
+
+    Args:
+        cursor (bytes): Raw cursor value from Datastore
+    """
+
+    @classmethod
+    def from_websafe_string(cls, urlsafe):
+        # Documented in Legacy NDB docstring for query.Query.fetch
+        return cls(urlsafe=urlsafe)
+
+    def __init__(self, cursor=None, urlsafe=None):
+        if cursor and urlsafe:
+            raise TypeError("Can't pass both 'cursor' and 'urlsafe'")
+
+        self.cursor = cursor
+
+        # Documented in official Legacy NDB docs
+        if urlsafe:
+            self.cursor = base64.urlsafe_b64decode(urlsafe)
+
+    def to_websafe_string(self):
+        # Documented in Legacy NDB docstring for query.Query.fetch
+        return self.urlsafe()
+
+    def urlsafe(self):
+        # Documented in official Legacy NDB docs
+        return base64.urlsafe_b64encode(self.cursor)
+
+    def __eq__(self, other):
+        if isinstance(other, Cursor):
+            return self.cursor == other.cursor
+
+        return NotImplemented
+
+    def __ne__(self, other):
+        # required for Python 2.7 compatibility
+        result = self.__eq__(other)
+        if result is NotImplemented:
+            result = False
+        return not result
+
+    def __hash__(self):
+        return hash(self.cursor)
diff --git a/packages/google-cloud-ndb/google/cloud/ndb/_datastore_types.py b/packages/google-cloud-ndb/google/cloud/ndb/_datastore_types.py
new file mode 100644
index 000000000000..7692040929ad
--- /dev/null
+++ b/packages/google-cloud-ndb/google/cloud/ndb/_datastore_types.py
@@ -0,0 +1,88 @@
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Ported implementations from the Google App Engine SDK.
+
+These are from the ``google.appengine.api.datastore_types`` module.
+The following members have been brought in:
+
+* ``BlobKey``
+"""
+
+import functools
+
+from google.cloud.ndb import exceptions
+
+
+_MAX_STRING_LENGTH = 1500
+
+
+@functools.total_ordering
+class BlobKey(object):
+    """Key used to identify a blob in the blobstore.
+
+    .. note::
+
+        The blobstore was an early Google App Engine feature that later became
+        Google Cloud Storage.
+
+    This class is a simple wrapper around a :class:`bytes` object. The bytes
+    represent a key used internally by the Blobstore API to identify
+    application blobs (i.e. Google Cloud Storage objects). The key corresponds
+    to the entity name of the underlying object.
+
+    Args:
+        blob_key (Optional[bytes]): The key used for the blobstore.
+
+    Raises:
+        exceptions.BadValueError: If the ``blob_key`` exceeds 1500 bytes.
+        exceptions.BadValueError: If the ``blob_key`` is not :data:`None` or a
+            :class:`bytes` instance.
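+
+    Example (illustrative sketch)::
+
+        blob_key = BlobKey(b"my-blob")   # wraps the raw bytes
+        blob_key == BlobKey(b"my-blob")  # True; compares by underlying bytes
+        blob_key == b"my-blob"           # also True; bytes compare directly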
+ """ + + def __init__(self, blob_key): + if isinstance(blob_key, bytes): + if len(blob_key) > _MAX_STRING_LENGTH: + raise exceptions.BadValueError( + "blob key must be under {:d} " "bytes.".format(_MAX_STRING_LENGTH) + ) + elif blob_key is not None: + raise exceptions.BadValueError( + "blob key should be bytes; received " + "{} (a {})".format(blob_key, type(blob_key).__name__) + ) + + self._blob_key = blob_key + + def __eq__(self, other): + if isinstance(other, BlobKey): + return self._blob_key == other._blob_key + elif isinstance(other, bytes): + return self._blob_key == other + else: + return NotImplemented + + def __lt__(self, other): + if isinstance(other, BlobKey): + # Python 2.7 does not raise an error when other is None. + if other._blob_key is None: + raise TypeError + return self._blob_key < other._blob_key + elif isinstance(other, bytes): + return self._blob_key < other + else: + raise TypeError + + def __hash__(self): + return hash(self._blob_key) diff --git a/packages/google-cloud-ndb/google/cloud/ndb/_eventloop.py b/packages/google-cloud-ndb/google/cloud/ndb/_eventloop.py new file mode 100644 index 000000000000..4d54865d54a2 --- /dev/null +++ b/packages/google-cloud-ndb/google/cloud/ndb/_eventloop.py @@ -0,0 +1,390 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Event loop for running callbacks. + +This should handle both asynchronous ``ndb`` objects and arbitrary callbacks. +""" +import collections +import logging +import uuid +import time + +import queue + +from google.cloud.ndb import utils + +log = logging.getLogger(__name__) + +_Event = collections.namedtuple("_Event", ("when", "callback", "args", "kwargs")) + + +class EventLoop(object): + """An event loop. + + Instances of ``EventLoop`` are used to coordinate single threaded execution + of tasks and RPCs scheduled asynchronously. + + Since the the ``EventLoop`` runs in the same thread as user code, it's best + to think of it as running tasks "on demand". Generally, when some piece of + code needs a result from a future, the future's + :meth:`~tasklets.Future.wait` method will end up calling + :meth:`~EventLoop.run1`, which will attempt to execute a single task that + is queued in the loop. The future will continue to call + :meth:`~EventLoop.run1` until one of the callbacks ultimately puts that + future into it's ``done`` state, either by setting the result or setting an + exception. + + The :meth:`~EventLoop.run` method, which consumes the entire queue before + returning, is usually only run when the end of the containing context is + reached. At this point, there can't be any code waiting for results from + the event loop, so any tasks still queued on the loop at this point, are + just being run without regard for their results. For example, a request + handler for a web application might write some objects to Datastore. This + makes sure those writes complete before we exit from the current context. + + Ultimately, all data flows from calls to gRPC. 
+    API calls in its own handler thread, so we use a synchronized queue to
+    coordinate with gRPC. When a future from a gRPC call is added with
+    :meth:`~EventLoop.queue_rpc`, a done callback is added to the gRPC future
+    which causes it to push itself onto the synchronized queue when it is
+    finished, so we can process the result here in the event loop. From the
+    finished gRPC call, results will flow back up through whatever series of
+    other futures were waiting on those results and results derived from those
+    results.
+
+    Currently, these are the separate queues used by the event loop in the
+    order they are checked by :meth:`~EventLoop.run1`. For each call to
+    :meth:`~EventLoop.run1`, the first thing it finds is called:
+
+    current: These callbacks are called first, if there are any. Currently
+        this is used to schedule calls to
+        :meth:`tasklets.TaskletFuture._advance_tasklet` when it's time to
+        send a tasklet a value that it was previously waiting on.
+
+    idlers: Effectively, these are the same as ``current``, but just get
+        called afterwards. These currently are used for batching certain
+        calls to the back end. For example, if you call
+        :func:`_datastore_api.lookup`, a new batch is created, and the key
+        you're requesting is added to it. Subsequent calls add keys to the
+        same batch. When the batch is initialized, an idler is added to the
+        event loop which issues a single Datastore Lookup call for the
+        entire batch. Because the event loop is called "on demand", this
+        means this idler won't get called until something needs a result
+        out of the event loop, and the actual gRPC call is made at that
+        time.
+
+    queue: These are callbacks that are supposed to be run at (or after) a
+        certain time. This is used by :func:`tasklets.sleep`.
+
+    rpcs: If all other queues are empty, and we are waiting on results of a
+        gRPC call, then we'll call :meth:`queue.Queue.get` on the
+        synchronized queue, :attr:`~EventLoop.rpc_results`, to get the next
+        finished gRPC call. This is the only point where
+        :meth:`~EventLoop.run1` might block. If the only thing to do is
+        wait for a gRPC call to finish, we may as well wait.
+
+    Attributes:
+        current (deque): a FIFO list of (callback, args, kwds). These callbacks
+            run immediately when the event loop runs. Used by tasklets to
+            schedule calls to :meth:`tasklets.TaskletFuture._advance_tasklet`.
+        idlers (deque): a FIFO list of (callback, args, kwds). These callbacks
+            run only when no other RPCs need to be fired first. Used for
+            batching calls to the Datastore back end.
+        inactive (int): Number of consecutive idlers that were noops. Reset
+            to 0 whenever work is done by any callback, not necessarily by an
+            idler. Not currently used.
+        queue (list): a sorted list of (absolute time in sec, callback, args,
+            kwds), sorted by time. These callbacks run only after the given
+            time. Used by :func:`tasklets.sleep`.
+        rpcs (dict): a map from RPC id to callback. The callback is called
+            when the RPC finishes.
+        rpc_results (queue.Queue): A synchronized queue used to coordinate with
+            gRPC. As gRPC futures that we're waiting on are finished, they will
+            get added to this queue and then processed by the event loop.
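+
+    A minimal sketch of the "on demand" scheduling described above (normally
+    the context owns and drives this loop, not user code)::
+
+        loop = EventLoop()
+        loop.call_soon(print, "now")          # runs on the next run1()/run()
+        loop.queue_call(0.5, print, "later")  # runs about half a second later
+        loop.run()                            # drains every queue, then returns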
+ """ + + def __init__(self): + self.current = collections.deque() + self.idlers = collections.deque() + self.inactive = 0 + self.queue = [] + self.rpcs = {} + self.rpc_results = queue.Queue() + + def clear(self): + """Remove all pending events without running any.""" + while self.current or self.idlers or self.queue or self.rpcs: + current = self.current + idlers = self.idlers + queue = self.queue + rpcs = self.rpcs + utils.logging_debug(log, "Clearing stale EventLoop instance...") + if current: + utils.logging_debug(log, " current = {}", current) + if idlers: + utils.logging_debug(log, " idlers = {}", idlers) + if queue: + utils.logging_debug(log, " queue = {}", queue) + if rpcs: + utils.logging_debug(log, " rpcs = {}", rpcs) + self.__init__() + current.clear() + idlers.clear() + queue[:] = [] + rpcs.clear() + utils.logging_debug(log, "Cleared") + + def insort_event_right(self, event): + """Insert event in queue with sorting. + + This function assumes the queue is already sorted by ``event.when`` and + inserts ``event`` in the queue, maintaining the sort. + + For events with same `event.when`, new events are inserted to the + right, to keep FIFO order. + + Args: + event (_Event): The event to insert. + """ + queue = self.queue + low = 0 + high = len(queue) + while low < high: + mid = (low + high) // 2 + if event.when < queue[mid].when: + high = mid + else: + low = mid + 1 + queue.insert(low, event) + + def call_soon(self, callback, *args, **kwargs): + """Schedule a function to be called soon, without a delay. + + Arguments: + callback (callable): The function to eventually call. + *args: Positional arguments to be passed to callback. + **kwargs: Keyword arguments to be passed to callback. + """ + self.current.append((callback, args, kwargs)) + + def queue_call(self, delay, callback, *args, **kwargs): + """Schedule a function call at a specific time in the future. + + Arguments: + delay (float): Time in seconds to delay running the callback. + Times over a billion seconds are assumed to be absolute + timestamps rather than delays. + callback (callable): The function to eventually call. + *args: Positional arguments to be passed to callback. + **kwargs: Keyword arguments to be passed to callback. + """ + when = time.time() + delay if delay < 1e9 else delay + event = _Event(when, callback, args, kwargs) + self.insort_event_right(event) + + def queue_rpc(self, rpc, callback): + """Add a gRPC call to the queue. + + Args: + rpc (:class:`_remote.RemoteCall`): The future for the gRPC + call. + callback (Callable[[:class:`_remote.RemoteCall`], None]): + Callback function to execute when gRPC call has finished. + + gRPC handles its asynchronous calls in a separate processing thread, so + we add our own callback to `rpc` which adds `rpc` to a synchronized + queue when it has finished. The event loop consumes the synchronized + queue and calls `callback` with the finished gRPC future. + """ + rpc_id = uuid.uuid1() + self.rpcs[rpc_id] = callback + + def rpc_callback(rpc): + self.rpc_results.put((rpc_id, rpc)) + + rpc.add_done_callback(rpc_callback) + + def add_idle(self, callback, *args, **kwargs): + """Add an idle callback. + + An idle callback is a low priority task which is executed when + there aren't other events scheduled for immediate execution. + + An idle callback can return True, False or None. 
These mean:
+
+        - None: remove the callback (don't reschedule)
+        - False: the callback did no work; reschedule later
+        - True: the callback did some work; reschedule soon
+
+        If the callback raises an exception, the traceback is logged and
+        the callback is removed.
+
+        Arguments:
+            callback (callable): The function to eventually call.
+            *args: Positional arguments to be passed to callback.
+            **kwargs: Keyword arguments to be passed to callback.
+        """
+        self.idlers.append((callback, args, kwargs))
+
+    def run_idle(self):
+        """Run one of the idle callbacks.
+
+        Returns:
+            bool: Indicates if an idle callback was called.
+        """
+        if not self.idlers or self.inactive >= len(self.idlers):
+            return False
+        idler = self.idlers.popleft()
+        callback, args, kwargs = idler
+        utils.logging_debug(log, "idler: {}", callback.__name__)
+        result = callback(*args, **kwargs)
+
+        # See add_idle() for meaning of callback return value.
+        if result is None:
+            utils.logging_debug(log, "idler {} removed", callback.__name__)
+        else:
+            if result:
+                self.inactive = 0
+            else:
+                self.inactive += 1
+            self.idlers.append(idler)
+        return True
+
+    def _run_current(self):
+        """Run one current item.
+
+        Returns:
+            bool: Indicates if a current callback was called.
+        """
+        if not self.current:
+            return False
+
+        self.inactive = 0
+        callback, args, kwargs = self.current.popleft()
+        callback(*args, **kwargs)
+        return True
+
+    def run0(self):
+        """Run one item (a callback or an RPC wait_any).
+
+        Returns:
+            float: A time to sleep if something happened (may be 0);
+                None if all queues are empty.
+        """
+        if self._run_current() or self.run_idle():
+            return 0
+
+        delay = None
+        if self.queue:
+            delay = self.queue[0][0] - time.time()
+            if delay <= 0:
+                self.inactive = 0
+                _, callback, args, kwargs = self.queue.pop(0)
+                utils.logging_debug(log, "event: {}", callback.__name__)
+                callback(*args, **kwargs)
+                return 0
+
+        if self.rpcs:
+            # Avoid circular import
+            from google.cloud.ndb import context as context_module
+
+            context = context_module.get_toplevel_context()
+
+            # This potentially blocks, waiting for an rpc to finish and put its
+            # result on the queue. Functionally equivalent to the ``wait_any``
+            # call that was used here in legacy NDB.
+            start_time = time.time()
+            rpc_id, rpc = self.rpc_results.get()
+            elapsed = time.time() - start_time
+            utils.logging_debug(log, "Blocked for {}s awaiting RPC results.", elapsed)
+            context.wait_time += elapsed
+
+            callback = self.rpcs.pop(rpc_id)
+            callback(rpc)
+            return 0
+
+        return delay
+
+    def run1(self):
+        """Run one item (a callback or an RPC wait_any) or sleep.
+
+        Returns:
+            bool: True if something happened; False if all queues are empty.
+        """
+        delay = self.run0()
+        if delay is None:
+            return False
+        if delay > 0:
+            time.sleep(delay)
+        return True
+
+    def run(self):
+        """Run until there's nothing left to do."""
+        self.inactive = 0
+        while True:
+            if not self.run1():
+                break
+
+
+def get_event_loop():
+    """Get the current event loop.
+
+    This function should be called within a context established by
+    :func:`~google.cloud.ndb.ndb_context`.
+
+    Returns:
+        EventLoop: The event loop for the current context.
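+
+    A short sketch of typical usage (assumes a configured ``ndb.Client``)::
+
+        from google.cloud import ndb
+
+        client = ndb.Client()
+        with client.context():
+            loop = get_event_loop()  # the loop owned by this context
+            loop.run()               # drain any pending callbacks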
+ """ + # Prevent circular import in Python 2.7 + from google.cloud.ndb import context as context_module + + context = context_module.get_context() + return context.eventloop + + +def add_idle(callback, *args, **kwargs): + """Calls :method:`EventLoop.add_idle` on current event loop.""" + loop = get_event_loop() + loop.add_idle(callback, *args, **kwargs) + + +def call_soon(callback, *args, **kwargs): + """Calls :method:`EventLoop.call_soon` on current event loop.""" + loop = get_event_loop() + loop.call_soon(callback, *args, **kwargs) + + +def queue_call(delay, callback, *args, **kwargs): + """Calls :method:`EventLoop.queue_call` on current event loop.""" + loop = get_event_loop() + loop.queue_call(delay, callback, *args, **kwargs) + + +def queue_rpc(future, rpc): + """Calls :method:`EventLoop.queue_rpc` on current event loop.""" + loop = get_event_loop() + loop.queue_rpc(future, rpc) + + +def run(): + """Calls :method:`EventLoop.run` on current event loop.""" + loop = get_event_loop() + loop.run() + + +def run1(): + """Calls :method:`EventLoop.run1` on current event loop.""" + loop = get_event_loop() + return loop.run1() diff --git a/packages/google-cloud-ndb/google/cloud/ndb/_gql.py b/packages/google-cloud-ndb/google/cloud/ndb/_gql.py new file mode 100644 index 000000000000..50e2d65de540 --- /dev/null +++ b/packages/google-cloud-ndb/google/cloud/ndb/_gql.py @@ -0,0 +1,875 @@ +import datetime +import re +import time + +from google.cloud.ndb import context as context_module +from google.cloud.ndb import exceptions +from google.cloud.ndb import query as query_module +from google.cloud.ndb import key +from google.cloud.ndb import model +from google.cloud.ndb import _datastore_query + + +class GQL(object): + """A GQL parser for NDB queries. + + GQL is a SQL-like language which supports more object-like semantics + in a language that is familiar to SQL users. + + - reserved words are case insensitive + - names are case sensitive + + The syntax for SELECT is fairly straightforward: + + SELECT [[DISTINCT] [, ...] | * | __key__ ] + [FROM ] + [WHERE [AND ...]] + [ORDER BY [ASC | DESC] [, [ASC | DESC] ...]] + [LIMIT [,]] + [OFFSET ] + [HINT (ORDER_FIRST | FILTER_FIRST | ANCESTOR_FIRST)] + [;] + := {< | <= | > | >= | = | != | IN | NOT IN} + := {< | <= | > | >= | = | != | IN | NOT IN} CAST() + := {IN | NOT IN} (, ...) + := ANCESTOR IS + + The class is implemented using some basic regular expression tokenization + to pull out reserved tokens and then the recursive descent parser will act + as a builder for the pre-compiled query. This pre-compiled query is then + used by google.cloud.ndb.query.gql to build an NDB Query object. + """ + + TOKENIZE_REGEX = re.compile( + r""" + (?:'[^'\n\r]*')+| + <=|>=|!=|=|<|>| + :\w+| + ,| + \*| + -?\d+(?:\.\d+)?| + \w+(?:\.\w+)*| + (?:"[^"\s]+")+| + \(|\)| + \S+ + """, + re.VERBOSE | re.IGNORECASE, + ) + + RESERVED_KEYWORDS = frozenset( + ( + "SELECT", + "DISTINCT", + "FROM", + "WHERE", + "IN", + "IS", + "AND", + "OR", + "NOT", + "ORDER", + "BY", + "ASC", + "DESC", + "GROUP", + "LIMIT", + "OFFSET", + "HINT", + "ORDER_FIRST", + "FILTER_FIRST", + "ANCESTOR_FIRST", + ) + ) + + _ANCESTOR = -1 + + _kind = None + _keys_only = False + _projection = None + _distinct = False + _has_ancestor = False + _offset = -1 + _limit = -1 + _hint = "" + + def __init__(self, query_string, _app=None, _auth_domain=None, namespace=None): + """Parses the input query into the class as a pre-compiled query. + + Args: + query_string (str): properly formatted GQL query string. 
+ namespace (str): The namespace to use for this query. Defaults to the client's value. + Raises: + exceptions.BadQueryError: if the query is not parsable. + """ + self._app = _app + + self._namespace = namespace + + self._auth_domain = _auth_domain + + self._symbols = self.TOKENIZE_REGEX.findall(query_string) + self._InitializeParseState() + try: + self._Select() + except exceptions.BadQueryError as error: + raise error + + def _InitializeParseState(self): + self._kind = None + self._keys_only = False + self._projection = None + self._distinct = False + self._has_ancestor = False + self._offset = -1 + self._limit = -1 + self._hint = "" + + self._filters = {} + + self._orderings = [] + self._next_symbol = 0 + + def filters(self): + """Return the compiled list of filters.""" + return self._filters + + def hint(self): + """Return the datastore hint. + + This is not used in NDB, but added for backwards compatibility. + """ + return self._hint + + def limit(self): + """Return numerical result count limit.""" + return self._limit + + def offset(self): + """Return numerical result offset.""" + if self._offset == -1: + return 0 + else: + return self._offset + + def orderings(self): + """Return the result ordering list.""" + return self._orderings + + def is_keys_only(self): + """Returns True if this query returns Keys, False if it returns + Entities.""" + return self._keys_only + + def projection(self): + """Returns the tuple of properties in the projection, or None.""" + return self._projection + + def is_distinct(self): + """Returns True if this query is marked as distinct.""" + return self._distinct + + def kind(self): + """Returns the kind for this query.""" + return self._kind + + @property + def _entity(self): + """Deprecated. Old way to refer to `kind`.""" + return self._kind + + _result_type_regex = re.compile(r"(\*|__key__)") + _quoted_string_regex = re.compile(r"((?:\'[^\'\n\r]*\')+)") + _ordinal_regex = re.compile(r":(\d+)$") + _named_regex = re.compile(r":(\w+)$") + _identifier_regex = re.compile(r"(\w+(?:\.\w+)*)$") + + _quoted_identifier_regex = re.compile(r'((?:"[^"\s]+")+)$') + _conditions_regex = re.compile(r"(<=|>=|!=|=|<|>|is|in|not)$", re.IGNORECASE) + _number_regex = re.compile(r"(\d+)$") + _cast_regex = re.compile(r"(geopt|user|key|date|time|datetime)$", re.IGNORECASE) + + def _Error(self, error_message): + """Generic query error. + + Args: + error_message (str): message for the 'Parse Error' string. + + Raises: + BadQueryError and passes on an error message from the caller. Will + raise BadQueryError on all calls to _Error() + """ + if self._next_symbol >= len(self._symbols): + raise exceptions.BadQueryError( + "Parse Error: %s at end of string" % error_message + ) + else: + raise exceptions.BadQueryError( + "Parse Error: %s at symbol %s" + % (error_message, self._symbols[self._next_symbol]) + ) + + def _Accept(self, symbol_string): + """Advance the symbol and return true if the next symbol matches input.""" + if self._next_symbol < len(self._symbols): + if self._symbols[self._next_symbol].upper() == symbol_string: + self._next_symbol += 1 + return True + return False + + def _Expect(self, symbol_string): + """Require that the next symbol matches symbol_string, or emit an error. + + Args: + symbol_string (str): next symbol expected by the caller + + Raises: + BadQueryError if the next symbol doesn't match the parameter passed + in. 
+ """ + if not self._Accept(symbol_string): + self._Error("Unexpected Symbol: %s" % symbol_string) + + def _AcceptRegex(self, regex): + """Advance and return the symbol if the next symbol matches the regex. + + Args: + regex: the compiled regular expression to attempt acceptance on. + + Returns: + The first group in the expression to allow for convenient access + to simple matches. Requires () around some objects in the + regex. None if no match is found. + """ + if self._next_symbol < len(self._symbols): + match_symbol = self._symbols[self._next_symbol] + match = regex.match(match_symbol) + if match: + self._next_symbol += 1 + matched_string = match.groups() and match.group(1) or None + + return matched_string + + return None + + def _AcceptTerminal(self): + """Accept either a single semi-colon or an empty string. + + Returns: + True + + Raises: + BadQueryError if there are unconsumed symbols in the query. + """ + + self._Accept(";") + + if self._next_symbol < len(self._symbols): + self._Error("Expected no additional symbols") + return True + + def _Select(self): + """Consume the SELECT clause and everything that follows it. + + Assumes SELECT * to start. Transitions to a FROM clause. + + Returns: + True if parsing completed okay. + """ + self._Expect("SELECT") + if self._Accept("DISTINCT"): + self._distinct = True + if not self._Accept("*"): + props = [self._ExpectIdentifier()] + while self._Accept(","): + props.append(self._ExpectIdentifier()) + if props == ["__key__"]: + self._keys_only = True + else: + self._projection = tuple(props) + return self._From() + + def _From(self): + """Consume the FROM clause. + + Assumes a single well formed entity in the clause. + Assumes FROM . Transitions to a WHERE clause. + + Returns: + True: if parsing completed okay. + """ + if self._Accept("FROM"): + self._kind = self._ExpectIdentifier() + return self._Where() + + def _Where(self): + """Consume the WHERE clause. + + These can have some recursion because of the AND symbol. + + Returns: + True: if parsing the WHERE clause completed correctly, as well as + all subsequent clauses. + """ + if self._Accept("WHERE"): + return self._FilterList() + return self._OrderBy() + + def _FilterList(self): + """Consume the filter list (remainder of the WHERE clause).""" + identifier = self._Identifier() + if not identifier: + self._Error("Invalid WHERE Identifier") + + condition = self._AcceptRegex(self._conditions_regex) + if not condition: + self._Error("Invalid WHERE Condition") + if condition.lower() == "not": + condition += "_" + self._AcceptRegex(self._conditions_regex) + + self._CheckFilterSyntax(identifier, condition) + + if not self._AddSimpleFilter(identifier, condition, self._Reference()): + if not self._AddSimpleFilter(identifier, condition, self._Literal()): + type_cast = self._TypeCast() + if not type_cast or not self._AddProcessedParameterFilter( + identifier, condition, *type_cast + ): + self._Error("Invalid WHERE Condition") + + if self._Accept("AND"): + return self._FilterList() + + return self._OrderBy() + + def _GetValueList(self): + """Read in a list of parameters from the tokens and return the list. + + Reads in a set of tokens by consuming symbols. Only accepts literals, + positional parameters, or named parameters. + + Returns: + list: Values parsed from the input. 
+ """ + params = [] + + while True: + reference = self._Reference() + if reference: + params.append(reference) + else: + literal = self._Literal() + params.append(literal) + + if not self._Accept(","): + break + + return params + + def _CheckFilterSyntax(self, identifier, raw_condition): + """Check that filter conditions are valid and throw errors if not. + + Args: + identifier (str): identifier being used in comparison. + condition (str): comparison operator used in the filter. + """ + condition = raw_condition.lower() + if identifier.lower() == "ancestor": + if condition == "is": + if self._has_ancestor: + self._Error('Only one ANCESTOR IS" clause allowed') + else: + self._Error('"IS" expected to follow "ANCESTOR"') + elif condition == "is": + self._Error('"IS" can only be used when comparing against "ANCESTOR"') + elif condition.startswith("not") and condition != "not_in": + self._Error('"NOT " can only be used as "NOT IN"') + + def _AddProcessedParameterFilter(self, identifier, condition, operator, parameters): + """Add a filter with post-processing required. + + Args: + identifier (str): property being compared. + condition (str): comparison operation being used with the property + (e.g. !=). + operator (str): operation to perform on the parameters before + adding the filter. + parameters (list): list of bound parameters passed to 'operator' + before creating the filter. When using the parameters as a + pass-through, pass 'nop' into the operator field and the first + value will be used unprocessed). + + Returns: + True: if the filter was okay to add. + """ + if parameters[0] is None: + return False + + filter_rule = (identifier, condition) + if identifier.lower() == "ancestor": + self._has_ancestor = True + filter_rule = (self._ANCESTOR, "is") + assert condition.lower() == "is" + + if operator == "list" and condition.lower() not in ["in", "not_in"]: + self._Error("Only IN can process a list of values, given '%s'" % condition) + + self._filters.setdefault(filter_rule, []).append((operator, parameters)) + return True + + def _AddSimpleFilter(self, identifier, condition, parameter): + """Add a filter to the query being built (no post-processing on parameter). + + Args: + identifier (str): identifier being used in comparison. + condition (str): comparison operator used in the filter. + parameter (Union[str, int, Literal]: ID of the reference being made + or a value of type Literal + + Returns: + bool: True if the filter could be added. False otherwise. + """ + return self._AddProcessedParameterFilter( + identifier, condition, "nop", [parameter] + ) + + def _Identifier(self): + """Consume an identifier and return it. + + Returns: + str: The identifier string. If quoted, the surrounding quotes are + stripped. + """ + identifier = self._AcceptRegex(self._identifier_regex) + if identifier: + if identifier.upper() in self.RESERVED_KEYWORDS: + self._next_symbol -= 1 + self._Error("Identifier is a reserved keyword") + else: + identifier = self._AcceptRegex(self._quoted_identifier_regex) + if identifier: + identifier = identifier[1:-1].replace('""', '"') + return identifier + + def _ExpectIdentifier(self): + id = self._Identifier() + if not id: + self._Error("Identifier Expected") + return id + + def _Reference(self): + """Consume a parameter reference and return it. + + Consumes a reference to a positional parameter (:1) or a named + parameter (:email). Only consumes a single reference (not lists). 
+
+        Returns:
+            Union[str, int]: The name of the reference (integer for positional
+                parameters or string for named parameters) to a bind-time
+                parameter.
+        """
+        reference = self._AcceptRegex(self._ordinal_regex)
+        if reference:
+            return int(reference)
+        else:
+            reference = self._AcceptRegex(self._named_regex)
+            if reference:
+                return reference
+
+        return None
+
+    def _Literal(self):
+        """Parse literals from our token list.
+
+        Returns:
+            Literal: The parsed literal from the input string (currently either
+                a string, integer, floating point value, boolean or None).
+        """
+
+        literal = None
+
+        if self._next_symbol < len(self._symbols):
+            try:
+                literal = int(self._symbols[self._next_symbol])
+            except ValueError:
+                pass
+            else:
+                self._next_symbol += 1
+
+            if literal is None:
+                try:
+                    literal = float(self._symbols[self._next_symbol])
+                except ValueError:
+                    pass
+                else:
+                    self._next_symbol += 1
+
+        if literal is None:
+            literal = self._AcceptRegex(self._quoted_string_regex)
+            if literal:
+                literal = literal[1:-1].replace("''", "'")
+
+        if literal is None:
+            if self._Accept("TRUE"):
+                literal = True
+            elif self._Accept("FALSE"):
+                literal = False
+
+        if literal is not None:
+            return Literal(literal)
+
+        if self._Accept("NULL"):
+            return Literal(None)
+        else:
+            return None
+
+    def _TypeCast(self, can_cast_list=True):
+        """Check if the next operation is a type-cast and return the cast if
+        so.
+
+        Casting operators look like simple function calls on their parameters.
+        This code returns the cast operator found and the list of parameters
+        provided by the user to complete the cast operation.
+
+        Args:
+            can_cast_list: Boolean to determine if list can be returned as one
+                of the cast operators. Default value is True.
+
+        Returns:
+            tuple: (cast operator, params) which represents the cast operation
+                requested and the parameters parsed from the cast clause.
+                Returns :data:`None` if there is no TypeCast function or a list
+                is not allowed to be cast.
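+
+        Example (sketch): for the clause ``KEY('Kind', 1)`` this returns
+        ``("key", [Literal('Kind'), Literal(1)])``.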
+ """ + cast_op = self._AcceptRegex(self._cast_regex) + if not cast_op: + if can_cast_list and self._Accept("("): + cast_op = "list" + else: + return None + else: + cast_op = cast_op.lower() + self._Expect("(") + + params = self._GetValueList() + self._Expect(")") + + return (cast_op, params) + + def _OrderBy(self): + """Consume the ORDER BY clause.""" + if self._Accept("ORDER"): + self._Expect("BY") + return self._OrderList() + return self._Limit() + + def _OrderList(self): + """Consume variables and sort order for ORDER BY clause.""" + identifier = self._Identifier() + if identifier: + if self._Accept("DESC"): + self._orderings.append((identifier, _datastore_query.DOWN)) + elif self._Accept("ASC"): + self._orderings.append((identifier, _datastore_query.UP)) + else: + self._orderings.append((identifier, _datastore_query.UP)) + else: + self._Error("Invalid ORDER BY Property") + + if self._Accept(","): + return self._OrderList() + return self._Limit() + + def _Limit(self): + """Consume the LIMIT clause.""" + if self._Accept("LIMIT"): + maybe_limit = self._AcceptRegex(self._number_regex) + + if maybe_limit: + if self._Accept(","): + self._offset = int(maybe_limit) + maybe_limit = self._AcceptRegex(self._number_regex) + + self._limit = int(maybe_limit) + if self._limit < 1: + self._Error("Bad Limit in LIMIT Value") + else: + self._Error("Non-number limit in LIMIT clause") + + return self._Offset() + + def _Offset(self): + """Consume the OFFSET clause.""" + if self._Accept("OFFSET"): + if self._offset != -1: + self._Error("Offset already defined in LIMIT clause") + offset = self._AcceptRegex(self._number_regex) + if offset: + self._offset = int(offset) + else: + self._Error("Non-number offset in OFFSET clause") + return self._Hint() + + def _Hint(self): + """Consume the HINT clause. + + Requires one of three options (mirroring the rest of the datastore): + + - HINT ORDER_FIRST + - HINT ANCESTOR_FIRST + - HINT FILTER_FIRST + + Returns: + bool: True if the hint clause and later clauses all parsed + correctly. + """ + if self._Accept("HINT"): + if self._Accept("ORDER_FIRST"): + self._hint = "ORDER_FIRST" + elif self._Accept("FILTER_FIRST"): + self._hint = "FILTER_FIRST" + elif self._Accept("ANCESTOR_FIRST"): + self._hint = "ANCESTOR_FIRST" + else: + self._Error("Unknown HINT") + return self._AcceptTerminal() + + def _args_to_val(self, func, args): + """Helper for GQL parsing to extract values from GQL expressions. + + This can extract the value from a GQL literal, return a Parameter + for a GQL bound parameter (:1 or :foo), and interprets casts like + KEY(...) and plain lists of values like (1, 2, 3). + + Args: + func (str): A string indicating what kind of thing this is. + args list[Union[int, str, Literal]]: One or more GQL values, each + integer, string, or GQL literal. 
+ """ + vals = [] + for arg in args: + if isinstance(arg, (str, int)): + val = query_module.Parameter(arg) + else: + val = arg.Get() + vals.append(val) + if func == "nop": + return vals[0] # May be a Parameter + pfunc = query_module.ParameterizedFunction(func, vals) + if pfunc.is_parameterized(): + return pfunc + return pfunc.resolve({}, {}) + + def query_filters(self, model_class, filters): + """Get the filters in a format compatible with the Query constructor""" + gql_filters = self.filters() + for name_op in sorted(gql_filters): + name, op = name_op + values = gql_filters[name_op] + op = op.lower() + for func, args in values: + prop = model_class._properties.get(name) + val = self._args_to_val(func, args) + if isinstance(val, query_module.ParameterizedThing): + node = query_module.ParameterNode(prop, op, val) + elif op == "in": + node = prop._IN(val) + elif op == "not_in": + node = prop._NOT_IN(val) + else: + node = prop._comparison(op, val) + filters.append(node) + if filters: + filters = query_module.ConjunctionNode(*filters) + else: + filters = None + return filters + + def get_query(self): + """Create and return a Query instance. + + Returns: + google.cloud.ndb.query.Query: A new query with values extracted + from the processed GQL query string. + """ + kind = self.kind() + if kind is None: + model_class = model.Model + else: + model_class = model.Model._lookup_model(kind) + kind = model_class._get_kind() + ancestor = None + model_filters = list(model_class._default_filters()) + filters = self.query_filters(model_class, model_filters) + default_options = None + offset = self.offset() + limit = self.limit() + if limit < 0: + limit = None + keys_only = self.is_keys_only() + if not keys_only: + keys_only = None + projection = self.projection() + project = self._app + namespace = self._namespace + if self.is_distinct(): + distinct_on = projection + else: + distinct_on = None + order_by = [] + for order in self.orderings(): + order_str, direction = order + if direction == 2: + order_str = "-{}".format(order_str) + order_by.append(order_str) + return query_module.Query( + kind=kind, + ancestor=ancestor, + filters=filters, + order_by=order_by, + project=project, + namespace=namespace, + default_options=default_options, + projection=projection, + distinct_on=distinct_on, + limit=limit, + offset=offset, + keys_only=keys_only, + ) + + +class Literal(object): + """Class for representing literal values differently than unbound params. + This is a simple wrapper class around basic types and datastore types. 
+ """ + + def __init__(self, value): + self._value = value + + def Get(self): + """Return the value of the literal.""" + return self._value + + def __eq__(self, other): + """A literal is equal to another if their values are the same""" + if not isinstance(other, Literal): + return NotImplemented + return self.Get() == other.Get() + + def __repr__(self): + return "Literal(%s)" % repr(self._value) + + +def _raise_not_implemented(func): + def raise_inner(value): + raise NotImplementedError("GQL function {} is not implemented".format(func)) + + return raise_inner + + +def _raise_cast_error(message): + raise exceptions.BadQueryError("GQL function error: {}".format(message)) + + +def _time_function(values): + if len(values) == 1: + value = values[0] + if isinstance(value, str): + try: + time_tuple = time.strptime(value, "%H:%M:%S") + except ValueError as error: + _raise_cast_error( + "Error during time conversion, {}, {}".format(error, values) + ) + time_tuple = time_tuple[3:] + time_tuple = time_tuple[0:3] + elif isinstance(value, int): + time_tuple = (value,) + else: + _raise_cast_error("Invalid argument for time(), {}".format(value)) + elif len(values) < 4: + time_tuple = tuple(values) + else: + _raise_cast_error("Too many arguments for time(), {}".format(values)) + try: + return datetime.time(*time_tuple) + except ValueError as error: + _raise_cast_error("Error during time conversion, {}, {}".format(error, values)) + + +def _date_function(values): + if len(values) == 1: + value = values[0] + if isinstance(value, str): + try: + time_tuple = time.strptime(value, "%Y-%m-%d")[0:6] + except ValueError as error: + _raise_cast_error( + "Error during date conversion, {}, {}".format(error, values) + ) + else: + _raise_cast_error("Invalid argument for date(), {}".format(value)) + elif len(values) == 3: + time_tuple = (values[0], values[1], values[2], 0, 0, 0) + else: + _raise_cast_error("Too many arguments for date(), {}".format(values)) + try: + return datetime.datetime(*time_tuple) + except ValueError as error: + _raise_cast_error("Error during date conversion, {}, {}".format(error, values)) + + +def _datetime_function(values): + if len(values) == 1: + value = values[0] + if isinstance(value, str): + try: + time_tuple = time.strptime(value, "%Y-%m-%d %H:%M:%S")[0:6] + except ValueError as error: + _raise_cast_error( + "Error during date conversion, {}, {}".format(error, values) + ) + else: + _raise_cast_error("Invalid argument for datetime(), {}".format(value)) + else: + time_tuple = values + try: + return datetime.datetime(*time_tuple) + except ValueError as error: + _raise_cast_error( + "Error during datetime conversion, {}, {}".format(error, values) + ) + + +def _geopt_function(values): + if len(values) != 2: + _raise_cast_error("GeoPt requires two input values, {}".format(values)) + return model.GeoPt(*values) + + +def _key_function(values): + if not len(values) % 2: + context = context_module.get_context() + client = context.client + return key.Key( + *values, + project=client.project, + database=client.database, + namespace=context.get_namespace(), + ) + _raise_cast_error( + "Key requires even number of operands or single string, {}".format(values) + ) + + +FUNCTIONS = { + "list": list, + "date": _date_function, + "datetime": _datetime_function, + "time": _time_function, + # even though gql for ndb supports querying for users, datastore does + # not, because it doesn't support passing entity representations as + # comparison arguments. Thus, we can't implement this. 
+ "user": _raise_not_implemented("user"), + "key": _key_function, + "geopt": _geopt_function, + "nop": _raise_not_implemented("nop"), +} diff --git a/packages/google-cloud-ndb/google/cloud/ndb/_legacy_entity_pb.py b/packages/google-cloud-ndb/google/cloud/ndb/_legacy_entity_pb.py new file mode 100644 index 000000000000..d171d2737822 --- /dev/null +++ b/packages/google-cloud-ndb/google/cloud/ndb/_legacy_entity_pb.py @@ -0,0 +1,810 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from google.cloud.ndb import _legacy_protocol_buffer as ProtocolBuffer + + +class PropertyValue_ReferenceValuePathElement(ProtocolBuffer.ProtocolMessage): + has_type_ = 0 + type_ = "" + has_id_ = 0 + id_ = 0 + has_name_ = 0 + name_ = "" + + def type(self): + return self.type_ + + def set_type(self, x): + self.has_type_ = 1 + self.type_ = x + + def has_type(self): + return self.has_type_ + + def id(self): + return self.id_ + + def set_id(self, x): + self.has_id_ = 1 + self.id_ = x + + def has_id(self): + return self.has_id_ + + def name(self): + return self.name_ + + def set_name(self, x): + self.has_name_ = 1 + self.name_ = x + + def has_name(self): + return self.has_name_ + + def TryMerge(self, d): + while 1: + tt = d.getVarInt32() + if tt == 116: + break + if tt == 122: + self.set_type(d.getPrefixedString()) + continue + if tt == 128: + self.set_id(d.getVarInt64()) + continue + if tt == 138: + self.set_name(d.getPrefixedString()) + continue + + if tt == 0: + raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + +class PropertyValue_PointValue(ProtocolBuffer.ProtocolMessage): + has_x_ = 0 + x_ = 0.0 + has_y_ = 0 + y_ = 0.0 + + def x(self): + return self.x_ + + def set_x(self, x): + self.has_x_ = 1 + self.x_ = x + + def has_x(self): + return self.has_x_ + + def y(self): + return self.y_ + + def set_y(self, x): + self.has_y_ = 1 + self.y_ = x + + def has_y(self): + return self.has_y_ + + def TryMerge(self, d): + while 1: + tt = d.getVarInt32() + if tt == 44: + break + if tt == 49: + self.set_x(d.getDouble()) + continue + if tt == 57: + self.set_y(d.getDouble()) + continue + + if tt == 0: + raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + +class PropertyValue_ReferenceValue(ProtocolBuffer.ProtocolMessage): + has_app_ = 0 + app_ = "" + has_name_space_ = 0 + name_space_ = "" + has_database_id_ = 0 + database_id_ = "" + + def __init__(self): + self.pathelement_ = [] + + def app(self): + return self.app_ + + def set_app(self, x): + self.has_app_ = 1 + self.app_ = x + + def has_app(self): + return self.has_app_ + + def name_space(self): + return self.name_space_ + + def set_name_space(self, x): + self.has_name_space_ = 1 + self.name_space_ = x + + def has_name_space(self): + return self.has_name_space_ + + def pathelement_list(self): + return self.pathelement_ + + def add_pathelement(self): + x = PropertyValue_ReferenceValuePathElement() + self.pathelement_.append(x) + return x + + def database_id(self): + return self.database_id_ + + def 
set_database_id(self, x): + self.has_database_id_ = 1 + self.database_id_ = x + + def has_database_id(self): + return self.has_database_id_ + + def TryMerge(self, d): + while 1: + tt = d.getVarInt32() + if tt == 100: + break + if tt == 106: + self.set_app(d.getPrefixedString()) + continue + if tt == 115: + self.add_pathelement().TryMerge(d) + continue + if tt == 162: + self.set_name_space(d.getPrefixedString()) + continue + if tt == 186: + self.set_database_id(d.getPrefixedString()) + continue + + if tt == 0: + raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + +class PropertyValue_UserValue(ProtocolBuffer.ProtocolMessage): + has_email_ = 0 + email_ = "" + has_auth_domain_ = 0 + auth_domain_ = "" + has_nickname_ = 0 + nickname_ = "" + has_gaiaid_ = 0 + gaiaid_ = 0 + has_obfuscated_gaiaid_ = 0 + obfuscated_gaiaid_ = "" + + def email(self): + return self.email_ + + def set_email(self, x): + self.has_email_ = 1 + self.email_ = x + + def auth_domain(self): + return self.auth_domain_ + + def set_auth_domain(self, x): + self.has_auth_domain_ = 1 + self.auth_domain_ = x + + def obfuscated_gaiaid(self): + return self.obfuscated_gaiaid_ + + def set_obfuscated_gaiaid(self, x): + self.has_obfuscated_gaiaid_ = 1 + self.obfuscated_gaiaid_ = x + + +class PropertyValue(ProtocolBuffer.ProtocolMessage): + has_int64value_ = 0 + int64value_ = 0 + has_booleanvalue_ = 0 + booleanvalue_ = 0 + has_stringvalue_ = 0 + stringvalue_ = "" + has_doublevalue_ = 0 + doublevalue_ = 0.0 + has_pointvalue_ = 0 + pointvalue_ = None + has_uservalue_ = 0 + uservalue_ = None + has_referencevalue_ = 0 + referencevalue_ = None + + def int64value(self): + return self.int64value_ + + def set_int64value(self, x): + self.has_int64value_ = 1 + self.int64value_ = x + + def has_int64value(self): + return self.has_int64value_ + + def booleanvalue(self): + return self.booleanvalue_ + + def set_booleanvalue(self, x): + self.has_booleanvalue_ = 1 + self.booleanvalue_ = x + + def has_booleanvalue(self): + return self.has_booleanvalue_ + + def stringvalue(self): + return self.stringvalue_ + + def set_stringvalue(self, x): + self.has_stringvalue_ = 1 + self.stringvalue_ = x + + def has_stringvalue(self): + return self.has_stringvalue_ + + def doublevalue(self): + return self.doublevalue_ + + def set_doublevalue(self, x): + self.has_doublevalue_ = 1 + self.doublevalue_ = x + + def has_doublevalue(self): + return self.has_doublevalue_ + + def pointvalue(self): + if self.pointvalue_ is None: + self.pointvalue_ = PropertyValue_PointValue() + return self.pointvalue_ + + def mutable_pointvalue(self): + self.has_pointvalue_ = 1 + return self.pointvalue() + + def has_pointvalue(self): + return self.has_pointvalue_ + + def referencevalue(self): + if self.referencevalue_ is None: + self.referencevalue_ = PropertyValue_ReferenceValue() + return self.referencevalue_ + + def mutable_referencevalue(self): + self.has_referencevalue_ = 1 + return self.referencevalue() + + def has_referencevalue(self): + return self.has_referencevalue_ + + def uservalue(self): + if self.uservalue_ is None: + self.uservalue_ = PropertyValue_UserValue() + return self.uservalue_ + + def mutable_uservalue(self): + self.has_uservalue_ = 1 + return self.uservalue() + + def has_uservalue(self): + return self.has_uservalue_ + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 8: + self.set_int64value(d.getVarInt64()) + continue + if tt == 16: + self.set_booleanvalue(d.getBoolean()) + continue + if tt == 26: + 
self.set_stringvalue(d.getPrefixedString()) + continue + if tt == 33: + self.set_doublevalue(d.getDouble()) + continue + if tt == 43: + self.mutable_pointvalue().TryMerge(d) + continue + if tt == 99: + self.mutable_referencevalue().TryMerge(d) + continue + + if tt == 0: + raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + +class Property(ProtocolBuffer.ProtocolMessage): + NO_MEANING = 0 + BLOB = 14 + TEXT = 15 + BYTESTRING = 16 + ATOM_CATEGORY = 1 + ATOM_LINK = 2 + ATOM_TITLE = 3 + ATOM_CONTENT = 4 + ATOM_SUMMARY = 5 + ATOM_AUTHOR = 6 + GD_WHEN = 7 + GD_EMAIL = 8 + GEORSS_POINT = 9 + GD_IM = 10 + GD_PHONENUMBER = 11 + GD_POSTALADDRESS = 12 + GD_RATING = 13 + BLOBKEY = 17 + ENTITY_PROTO = 19 + INDEX_VALUE = 18 + EMPTY_LIST = 24 + + _Meaning_NAMES = { + 0: "NO_MEANING", + 14: "BLOB", + 15: "TEXT", + 16: "BYTESTRING", + 1: "ATOM_CATEGORY", + 2: "ATOM_LINK", + 3: "ATOM_TITLE", + 4: "ATOM_CONTENT", + 5: "ATOM_SUMMARY", + 6: "ATOM_AUTHOR", + 7: "GD_WHEN", + 8: "GD_EMAIL", + 9: "GEORSS_POINT", + 10: "GD_IM", + 11: "GD_PHONENUMBER", + 12: "GD_POSTALADDRESS", + 13: "GD_RATING", + 17: "BLOBKEY", + 19: "ENTITY_PROTO", + 18: "INDEX_VALUE", + 24: "EMPTY_LIST", + } + + def Meaning_Name(cls, x): + return cls._Meaning_NAMES.get(x, "") + + Meaning_Name = classmethod(Meaning_Name) + + has_meaning_ = 0 + meaning_ = 0 + has_meaning_uri_ = 0 + meaning_uri_ = "" + has_name_ = 0 + name_ = "" + has_value_ = 0 + has_multiple_ = 0 + multiple_ = 0 + has_stashed_ = 0 + stashed_ = -1 + has_computed_ = 0 + computed_ = 0 + + def __init__(self): + self.value_ = PropertyValue() + + def meaning(self): + return self.meaning_ + + def set_meaning(self, x): + self.has_meaning_ = 1 + self.meaning_ = x + + def has_meaning(self): + return self.has_meaning_ + + def meaning_uri(self): + return self.meaning_uri_ + + def set_meaning_uri(self, x): + self.has_meaning_uri_ = 1 + self.meaning_uri_ = x + + def has_meaning_uri(self): + return self.has_meaning_uri_ + + def name(self): + return self.name_ + + def set_name(self, x): + self.has_name_ = 1 + self.name_ = x + + def has_name(self): + return self.has_name_ + + def value(self): + return self.value_ + + def mutable_value(self): + self.has_value_ = 1 + return self.value_ + + def has_value(self): + return self.has_value_ + + def multiple(self): + return self.multiple_ + + def set_multiple(self, x): + self.has_multiple_ = 1 + self.multiple_ = x + + def has_multiple(self): + return self.has_multiple_ + + def stashed(self): + return self.stashed_ + + def set_stashed(self, x): + self.has_stashed_ = 1 + self.stashed_ = x + + def has_stashed(self): + return self.has_stashed_ + + def computed(self): + return self.computed_ + + def set_computed(self, x): + self.has_computed_ = 1 + self.computed_ = x + + def has_computed(self): + return self.has_computed_ + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 8: + self.set_meaning(d.getVarInt32()) + continue + if tt == 18: + self.set_meaning_uri(d.getPrefixedString()) + continue + if tt == 26: + self.set_name(d.getPrefixedString()) + continue + if tt == 32: + self.set_multiple(d.getBoolean()) + continue + if tt == 42: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.mutable_value().TryMerge(tmp) + continue + if tt == 48: + self.set_stashed(d.getVarInt32()) + continue + if tt == 56: + self.set_computed(d.getBoolean()) + continue + + if tt == 0: + raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + +class 
Path_Element(ProtocolBuffer.ProtocolMessage): + has_type_ = 0 + type_ = "" + has_id_ = 0 + id_ = 0 + has_name_ = 0 + name_ = "" + + @property + def type(self): + # Force legacy byte-str to be a str. + if type(self.type_) is bytes: + return self.type_.decode() + return self.type_ + + def set_type(self, x): + self.has_type_ = 1 + self.type_ = x + + def has_type(self): + return self.has_type_ + + @property + def id(self): + return self.id_ + + def set_id(self, x): + self.has_id_ = 1 + self.id_ = x + + def has_id(self): + return self.has_id_ + + @property + def name(self): + return self.name_ + + def set_name(self, x): + self.has_name_ = 1 + self.name_ = x + + def has_name(self): + return self.has_name_ + + def TryMerge(self, d): + while 1: + tt = d.getVarInt32() + if tt == 12: + break + if tt == 18: + self.set_type(d.getPrefixedString()) + continue + if tt == 24: + self.set_id(d.getVarInt64()) + continue + if tt == 34: + self.set_name(d.getPrefixedString()) + continue + + if tt == 0: + raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + +class Path(ProtocolBuffer.ProtocolMessage): + def __init__(self): + self.element_ = [] + + @property + def element(self): + return self.element_ + + def element_list(self): + return self.element_ + + def element_size(self): + return len(self.element_) + + def add_element(self): + x = Path_Element() + self.element_.append(x) + return x + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 11: + self.add_element().TryMerge(d) + continue + + if tt == 0: + raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + +class Reference(ProtocolBuffer.ProtocolMessage): + has_app_ = 0 + app_ = "" + has_name_space_ = 0 + name_space_ = "" + has_path_ = 0 + has_database_id_ = 0 + database_id_ = "" + + def __init__(self): + self.path_ = Path() + + @property + def app(self): + return self.app_ + + def set_app(self, x): + self.has_app_ = 1 + self.app_ = x + + def has_app(self): + return self.has_app_ + + @property + def name_space(self): + return self.name_space_ + + def set_name_space(self, x): + self.has_name_space_ = 1 + self.name_space_ = x + + def has_name_space(self): + return self.has_name_space_ + + @property + def path(self): + return self.path_ + + def mutable_path(self): + self.has_path_ = 1 + return self.path_ + + def has_path(self): + return self.has_path_ + + @property + def database_id(self): + return self.database_id_ + + def set_database_id(self, x): + self.has_database_id_ = 1 + self.database_id_ = x + + def has_database_id(self): + return self.has_database_id_ + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 106: + self.set_app(d.getPrefixedString()) + continue + if tt == 114: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.mutable_path().TryMerge(tmp) + continue + if tt == 162: + self.set_name_space(d.getPrefixedString()) + continue + if tt == 186: + self.set_database_id(d.getPrefixedString()) + continue + + if tt == 0: + raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + +class EntityProto(ProtocolBuffer.ProtocolMessage): + has_key_ = 0 + has_owner_ = 0 + owner_ = None + has_kind_ = 0 + kind_ = 0 + has_kind_uri_ = 0 + kind_uri_ = "" + + def __init__(self): + self.key_ = Reference() + self.property_ = [] + + def key(self): + return self.key_ + + def mutable_key(self): + self.has_key_ = 1 + return self.key_ + + def has_key(self): + return self.has_key_ + + def kind(self): + return 
self.kind_ + + def set_kind(self, x): + self.has_kind_ = 1 + self.kind_ = x + + def has_kind(self): + return self.has_kind_ + + def kind_uri(self): + return self.kind_uri_ + + def set_kind_uri(self, x): + self.has_kind_uri_ = 1 + self.kind_uri_ = x + + def has_kind_uri(self): + return self.has_kind_uri_ + + def property_list(self): + return self.property_ + + def add_property(self): + x = Property() + self.property_.append(x) + return x + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 32: + self.set_kind(d.getVarInt32()) + continue + if tt == 42: + self.set_kind_uri(d.getPrefixedString()) + continue + if tt == 106: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.mutable_key().TryMerge(tmp) + continue + if tt == 114: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.add_property().TryMerge(tmp) + continue + if tt == 122: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.add_property().TryMerge(tmp) + continue + + if tt == 0: + raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + def _get_property_value(self, prop): + if prop.has_stringvalue(): + return prop.stringvalue() + if prop.has_int64value(): + return prop.int64value() + if prop.has_booleanvalue(): + return prop.booleanvalue() + if prop.has_doublevalue(): + return prop.doublevalue() + if prop.has_pointvalue(): + return prop.pointvalue() + if prop.has_referencevalue(): + return prop.referencevalue() + return None + + def entity_props(self): + entity_props = {} + for prop in self.property_list(): + name = prop.name().decode("utf-8") + entity_props[name] = ( + self._get_property_value(prop.value()) if prop.has_value() else None + ) + return entity_props + + +__all__ = [ + "PropertyValue", + "PropertyValue_ReferenceValuePathElement", + "PropertyValue_PointValue", + "PropertyValue_ReferenceValue", + "Property", + "Path", + "Path_Element", + "Reference", + "EntityProto", +] diff --git a/packages/google-cloud-ndb/google/cloud/ndb/_legacy_protocol_buffer.py b/packages/google-cloud-ndb/google/cloud/ndb/_legacy_protocol_buffer.py new file mode 100644 index 000000000000..0b10f0b4674a --- /dev/null +++ b/packages/google-cloud-ndb/google/cloud/ndb/_legacy_protocol_buffer.py @@ -0,0 +1,207 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
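A minimal usage sketch for the legacy entity classes above, assuming the module is importable as google.cloud.ndb._legacy_entity_pb; this builds an EntityProto in memory and reads it back, rather than decoding wire bytes:

    from google.cloud.ndb import _legacy_entity_pb

    # Build a legacy entity with one string property, then read it back.
    entity = _legacy_entity_pb.EntityProto()
    prop = entity.add_property()                    # appends and returns a Property
    prop.set_name(b"title")                         # legacy names are byte strings
    prop.mutable_value().set_stringvalue(b"hello")  # marks has_value_ and sets it

    # entity_props() decodes names to str and unwraps each PropertyValue.
    assert entity.entity_props() == {"title": b"hello"}

Wire decoding runs the other direction: MergePartialFromString(raw_bytes) drives EntityProto.TryMerge through the Decoder defined in _legacy_protocol_buffer.py below.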
+ + +import array +import struct + + +# Python 3 doesn't have "long" anymore +long = int + + +class ProtocolBufferDecodeError(Exception): + pass + + +class ProtocolMessage: + def MergePartialFromString(self, s): + a = array.array("B") + a.frombytes(s) + d = Decoder(a, 0, len(a)) + self.TryMerge(d) + + +class Decoder: + NUMERIC = 0 + DOUBLE = 1 + STRING = 2 + STARTGROUP = 3 + ENDGROUP = 4 + FLOAT = 5 + MAX_TYPE = 6 + + def __init__(self, buf, idx, limit): + self.buf = buf + self.idx = idx + self.limit = limit + return + + def avail(self): + return self.limit - self.idx + + def buffer(self): + return self.buf + + def pos(self): + return self.idx + + def skip(self, n): + if self.idx + n > self.limit: + raise ProtocolBufferDecodeError("truncated") + self.idx += n + return + + def skipData(self, tag): + t = tag & 7 + if t == self.NUMERIC: + self.getVarInt64() + elif t == self.DOUBLE: + self.skip(8) + elif t == self.STRING: + n = self.getVarInt32() + self.skip(n) + elif t == self.STARTGROUP: + while 1: + t = self.getVarInt32() + if (t & 7) == self.ENDGROUP: + break + else: + self.skipData(t) + if (t - self.ENDGROUP) != (tag - self.STARTGROUP): + raise ProtocolBufferDecodeError("corrupted") + elif t == self.ENDGROUP: + raise ProtocolBufferDecodeError("corrupted") + elif t == self.FLOAT: + self.skip(4) + else: + raise ProtocolBufferDecodeError("corrupted") + + def get8(self): + if self.idx >= self.limit: + raise ProtocolBufferDecodeError("truncated") + c = self.buf[self.idx] + self.idx += 1 + return c + + def get16(self): + if self.idx + 2 > self.limit: + raise ProtocolBufferDecodeError("truncated") + c = self.buf[self.idx] + d = self.buf[self.idx + 1] + self.idx += 2 + return (d << 8) | c + + def get32(self): + if self.idx + 4 > self.limit: + raise ProtocolBufferDecodeError("truncated") + c = self.buf[self.idx] + d = self.buf[self.idx + 1] + e = self.buf[self.idx + 2] + f = long(self.buf[self.idx + 3]) + self.idx += 4 + return (f << 24) | (e << 16) | (d << 8) | c + + def get64(self): + if self.idx + 8 > self.limit: + raise ProtocolBufferDecodeError("truncated") + c = self.buf[self.idx] + d = self.buf[self.idx + 1] + e = self.buf[self.idx + 2] + f = long(self.buf[self.idx + 3]) + g = long(self.buf[self.idx + 4]) + h = long(self.buf[self.idx + 5]) + i = long(self.buf[self.idx + 6]) + j = long(self.buf[self.idx + 7]) + self.idx += 8 + return ( + (j << 56) + | (i << 48) + | (h << 40) + | (g << 32) + | (f << 24) + | (e << 16) + | (d << 8) + | c + ) + + def getVarInt32(self): + b = self.get8() + if not (b & 128): + return b + + result = long(0) + shift = 0 + + while 1: + result |= long(b & 127) << shift + shift += 7 + if not (b & 128): + break + if shift >= 64: + raise ProtocolBufferDecodeError("corrupted") + b = self.get8() + + if result >= 0x8000000000000000: + result -= 0x10000000000000000 + + if result >= 0x80000000 or result < -0x80000000: + raise ProtocolBufferDecodeError("corrupted") + return result + + def getVarInt64(self): + result = self.getVarUint64() + if result >= (1 << 63): + result -= 1 << 64 + return result + + def getVarUint64(self): + result = long(0) + shift = 0 + while 1: + if shift >= 64: + raise ProtocolBufferDecodeError("corrupted") + b = self.get8() + result |= long(b & 127) << shift + shift += 7 + if not (b & 128): + return result + + def getDouble(self): + if self.idx + 8 > self.limit: + raise ProtocolBufferDecodeError("truncated") + a = self.buf[self.idx : self.idx + 8] # noqa: E203 + self.idx += 8 + return struct.unpack("<d", a)[0] + + def getPrefixedString(self): + length = self.getVarInt32() + if self.idx + length > self.limit: + raise
ProtocolBufferDecodeError("truncated") + r = self.buf[self.idx : self.idx + length] # noqa: E203 + self.idx += length + return r.tobytes() + + +__all__ = [ + "ProtocolMessage", + "Decoder", + "ProtocolBufferDecodeError", +] diff --git a/packages/google-cloud-ndb/google/cloud/ndb/_options.py b/packages/google-cloud-ndb/google/cloud/ndb/_options.py new file mode 100644 index 000000000000..d6caf13a20ee --- /dev/null +++ b/packages/google-cloud-ndb/google/cloud/ndb/_options.py @@ -0,0 +1,233 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Support for options.""" + +import functools +import itertools +import logging + +from google.cloud.ndb import exceptions + +log = logging.getLogger(__name__) + + +class Options(object): + __slots__ = ( + # Supported + "retries", + "timeout", + "use_cache", + "use_global_cache", + "global_cache_timeout", + "use_datastore", + # Deprecated + "force_writes", + "max_memcache_items", + "propagation", + "deadline", + "use_memcache", + "memcache_timeout", + ) + + @classmethod + def options_or_model_properties(cls, wrapped): + return cls.options(wrapped, _disambiguate_from_model_properties=True) + + @classmethod + def options(cls, wrapped, _disambiguate_from_model_properties=False): + slots = set(cls.slots()) + # If there are any positional arguments, get their names. + # inspect.signature is not available in Python 2.7, so we use the + # arguments obtained with inspect.getargspec, which come from the + # positional decorator used with all query_options decorated methods. + positional = getattr(wrapped, "_positional_names", []) + + # We need for any non-option arguments to come before any option + # arguments + in_options = False + for name in positional: + if name in slots: + in_options = True + + elif in_options and name != "_options": + raise TypeError( + "All positional non-option arguments must precede option " + "arguments in function signature." + ) + + @functools.wraps(wrapped) + def wrapper(*args, **kwargs): + pass_args = [] + kw_options = {} + + # Process positional args + for name, value in zip(positional, args): + if name in slots: + kw_options[name] = value + + else: + pass_args.append(value) + + if _disambiguate_from_model_properties: + model_class = args[0] + get_arg = model_class._get_arg + + else: + + def get_arg(kwargs, name): + return kwargs.pop(name, None) + + # Process keyword args + for name in slots: + if name not in kw_options: + kw_options[name] = get_arg(kwargs, name) + + # If another function that uses options is delegating to this one, + # we'll already have options. 
+ if "_options" not in kwargs: + kwargs["_options"] = cls(**kw_options) + + return wrapped(*pass_args, **kwargs) + + return wrapper + + @classmethod + def slots(cls): + return itertools.chain( + *( + ancestor.__slots__ + for ancestor in cls.__mro__ + if hasattr(ancestor, "__slots__") + ) + ) + + def __init__(self, config=None, **kwargs): + cls = type(self) + if config is not None and not isinstance(config, cls): + raise TypeError("Config must be a {} instance.".format(cls.__name__)) + + deadline = kwargs.pop("deadline", None) + if deadline is not None: + timeout = kwargs.get("timeout") + if timeout: + raise TypeError("Can't specify both 'deadline' and 'timeout'") + kwargs["timeout"] = deadline + + memcache_timeout = kwargs.pop("memcache_timeout", None) + if memcache_timeout is not None: + global_cache_timeout = kwargs.get("global_cache_timeout") + if global_cache_timeout is not None: + raise TypeError( + "Can't specify both 'memcache_timeout' and " + "'global_cache_timeout'" + ) + kwargs["global_cache_timeout"] = memcache_timeout + + use_memcache = kwargs.pop("use_memcache", None) + if use_memcache is not None: + use_global_cache = kwargs.get("use_global_cache") + if use_global_cache is not None: + raise TypeError( + "Can't specify both 'use_memcache' and 'use_global_cache'" + ) + kwargs["use_global_cache"] = use_memcache + + for key in self.slots(): + default = getattr(config, key, None) if config else None + setattr(self, key, kwargs.pop(key, default)) + + if kwargs.pop("xg", False): + log.warning( + "Use of the 'xg' option is deprecated. All transactions are " + "cross group (up to 25 groups) transactions, by default. This " + "option is ignored." + ) + + if kwargs: + raise TypeError( + "{} got an unexpected keyword argument '{}'".format( + type(self).__name__, next(iter(kwargs)) + ) + ) + + if self.max_memcache_items is not None: + raise exceptions.NoLongerImplementedError() + + if self.force_writes is not None: + raise exceptions.NoLongerImplementedError() + + if self.propagation is not None: + raise exceptions.NoLongerImplementedError() + + def __eq__(self, other): + if type(self) is not type(other): + return NotImplemented + + for key in self.slots(): + if getattr(self, key, None) != getattr(other, key, None): + return False + + return True + + def __ne__(self, other): + # required for Python 2.7 compatibility + result = self.__eq__(other) + if result is NotImplemented: + result = False + return not result + + def __repr__(self): + options = ", ".join( + [ + "{}={}".format(key, repr(getattr(self, key, None))) + for key in self.slots() + if getattr(self, key, None) is not None + ] + ) + return "{}({})".format(type(self).__name__, options) + + def copy(self, **kwargs): + return type(self)(config=self, **kwargs) + + def items(self): + for name in self.slots(): + yield name, getattr(self, name, None) + + +class ReadOptions(Options): + __slots__ = ("read_consistency", "read_policy", "transaction") + + def __init__(self, config=None, **kwargs): + read_policy = kwargs.pop("read_policy", None) + if read_policy: + log.warning( + "Use of the 'read_policy' options is deprecated. Please use " + "'read_consistency'" + ) + if kwargs.get("read_consistency"): + raise TypeError( + "Cannot use both 'read_policy' and 'read_consistency' " "options." 
+ ) + kwargs["read_consistency"] = read_policy + + if not kwargs.get("transaction"): + # Avoid circular import in Python 2.7 + from google.cloud.ndb import context as context_module + + context = context_module.get_context(False) + if context: + kwargs["transaction"] = context.transaction + + super(ReadOptions, self).__init__(config=config, **kwargs) diff --git a/packages/google-cloud-ndb/google/cloud/ndb/_remote.py b/packages/google-cloud-ndb/google/cloud/ndb/_remote.py new file mode 100644 index 000000000000..193a7ba7620a --- /dev/null +++ b/packages/google-cloud-ndb/google/cloud/ndb/_remote.py @@ -0,0 +1,84 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""A class for information about remote calls.""" + +# In its own module to avoid circular import between _datastore_api and +# tasklets modules. +import grpc +import time + +from google.cloud.ndb import exceptions + + +class RemoteCall(object): + """Represents a remote call. + + This is primarily a wrapper for futures returned by gRPC. This holds some + information about the call to make debugging easier. Can be used for + anything that returns a future for something running outside of our own + event loop. + + Arguments: + future (Union[grpc.Future, tasklets.Future]): The future handed back + from initiating the call. + info (str): Helpful human readable string about the call. This string + will be handed back verbatim by calls to :meth:`__repr__`. + """ + + def __init__(self, future, info): + self.future = future + self.info = info + self.start_time = time.time() + self.elapsed_time = 0 + + def record_time(future): + self.elapsed_time = time.time() - self.start_time + + future.add_done_callback(record_time) + + def __repr__(self): + return self.info + + def exception(self): + """Calls :meth:`grpc.Future.exception` on :attr:`future`.""" + # GRPC will actually raise FutureCancelledError. + # We'll translate that to our own Cancelled exception and *return* it, + # which is far more polite for a method that *returns exceptions*. + try: + return self.future.exception() + except grpc.FutureCancelledError: + return exceptions.Cancelled() + + def result(self): + """Calls :meth:`grpc.Future.result` on :attr:`future`.""" + return self.future.result() + + def add_done_callback(self, callback): + """Add a callback function to be run upon task completion. Will run + immediately if task has already finished. + + Args: + callback (Callable): The function to execute. 
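+
+        For example, a sketch where ``call`` is a RemoteCall instance; note
+        the callback receives the RemoteCall itself, not the wrapped future:
+
+            call.add_done_callback(lambda rpc: print(rpc.info))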
+ """ + remote = self + + def wrapper(rpc): + return callback(remote) + + self.future.add_done_callback(wrapper) + + def cancel(self): + """Calls :meth:`grpc.Future.cancel` on attr:`cancel`.""" + return self.future.cancel() diff --git a/packages/google-cloud-ndb/google/cloud/ndb/_retry.py b/packages/google-cloud-ndb/google/cloud/ndb/_retry.py new file mode 100644 index 000000000000..cef5f516539d --- /dev/null +++ b/packages/google-cloud-ndb/google/cloud/ndb/_retry.py @@ -0,0 +1,147 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Retry functions.""" + +import functools +import itertools + +from google.api_core import retry as core_retry +from google.api_core import exceptions as core_exceptions +from google.cloud.ndb import exceptions +from google.cloud.ndb import tasklets + +_DEFAULT_INITIAL_DELAY = 1.0 # seconds +_DEFAULT_MAXIMUM_DELAY = 60.0 # seconds +_DEFAULT_DELAY_MULTIPLIER = 2.0 +_DEFAULT_RETRIES = 3 + + +def wraps_safely(obj, attr_names=functools.WRAPPER_ASSIGNMENTS): + """Python 2.7 functools.wraps has a bug where attributes like ``module`` + are not copied to the wrappers and thus cause attribute errors. This + wrapper prevents that problem.""" + return functools.wraps( + obj, assigned=(name for name in attr_names if hasattr(obj, name)) + ) + + +def retry_async(callback, retries=_DEFAULT_RETRIES): + """Decorator for retrying functions or tasklets asynchronously. + + The `callback` will be called up to `retries + 1` times. Any transient + API errors (internal server errors) raised by `callback` will be caught and + `callback` will be retried until the call either succeeds, raises a + non-transient error, or the number of retries is exhausted. + + See: :func:`google.api_core.retry.if_transient_error` for information on + what kind of errors are considered transient. + + Args: + callback (Callable): The function to be tried. May be a tasklet. + retries (Integer): Number of times to retry `callback`. Will try up to + `retries + 1` times. + + Returns: + tasklets.Future: Result will be the return value of `callback`. + """ + + @tasklets.tasklet + @wraps_safely(callback) + def retry_wrapper(*args, **kwargs): + from google.cloud.ndb import context as context_module + + sleep_generator = core_retry.exponential_sleep_generator( + _DEFAULT_INITIAL_DELAY, + _DEFAULT_MAXIMUM_DELAY, + _DEFAULT_DELAY_MULTIPLIER, + ) + + for sleep_time in itertools.islice(sleep_generator, retries + 1): + context = context_module.get_context() + if not context.in_retry(): + # We need to be able to identify if we are inside a nested + # retry. Here, we set the retry state in the context. This is + # used for deciding if an exception should be raised + # immediately or passed up to the outer retry block. 
+ context.set_retry_state(repr(callback)) + try: + result = callback(*args, **kwargs) + if isinstance(result, tasklets.Future): + result = yield result + except exceptions.NestedRetryException as e: + error = e + except BaseException as e: + # `e` is removed from locals at end of block + error = e # See: https://goo.gl/5J8BMK + + if not is_transient_error(error): + # If we are in an inner retry block, use special nested + # retry exception to bubble up to outer retry. Else, raise + # actual exception. + if context.get_retry_state() != repr(callback): + message = getattr(error, "message", str(error)) + raise exceptions.NestedRetryException(message) + else: + raise error + else: + raise tasklets.Return(result) + finally: + # No matter what, if we are exiting the top level retry, + # clear the retry state in the context. + if context.get_retry_state() == repr(callback): # pragma: NO BRANCH + context.clear_retry_state() + + yield tasklets.sleep(sleep_time) + + # Unknown errors really want to show up as None, so manually set the error. + if isinstance(error, core_exceptions.Unknown): + error = "google.api_core.exceptions.Unknown" + + raise core_exceptions.RetryError( + "Maximum number of {} retries exceeded while calling {}".format( + retries, callback + ), + cause=error, + ) + + return retry_wrapper + + +# Possibly we should include DeadlineExceeded. The caveat is that I think the +# timeout is enforced on the client side, so it might be possible that a Commit +# request times out on the client side, but still writes data on the server +# side, in which case we don't want to retry, since we can't commit the same +# transaction more than once. Some more research is needed here. If we discover +# that a DeadlineExceeded error guarantees the operation was cancelled, then we +# can add DeadlineExceeded to our retryable errors. Not knowing the answer, +# it's best not to take that risk. +TRANSIENT_ERRORS = ( + core_exceptions.ServiceUnavailable, + core_exceptions.InternalServerError, + core_exceptions.Aborted, + core_exceptions.Unknown, +) + + +def is_transient_error(error): + """Determine whether an error is transient. + + Returns: + bool: True if error is transient, else False. + """ + if core_retry.if_transient_error(error): + return True + + return isinstance(error, TRANSIENT_ERRORS) diff --git a/packages/google-cloud-ndb/google/cloud/ndb/_transaction.py b/packages/google-cloud-ndb/google/cloud/ndb/_transaction.py new file mode 100644 index 000000000000..f07d752ca92b --- /dev/null +++ b/packages/google-cloud-ndb/google/cloud/ndb/_transaction.py @@ -0,0 +1,479 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import functools +import logging + +from google.cloud.ndb import exceptions +from google.cloud.ndb import _retry +from google.cloud.ndb import tasklets +from google.cloud.ndb import utils + +log = logging.getLogger(__name__) + + +class _Propagation(object): + """This class aims to emulate the same behaviour as was provided by the old + Datastore RPC library. 
+ + https://cloud.google.com/appengine/docs/standard/python/ndb/functions#context_options + + It provides limited support for transactions within transactions. It has a + single public method func:`handle_propagation`. + + Args: + propagation (int): The desired `propagation` option, corresponding + to a class:`TransactionOptions` option. + join (:obj:`bool`, optional): If the provided join argument must be + changed to conform to the requested propagation option then a + warning will be emitted. If it is not provided, it will be set + according to the propagation option but no warning is emitted. + """ + + def __init__(self, propagation, join=None): + # Avoid circular import in Python 2.7 + from google.cloud.ndb import context as context_module + + propagation_options = context_module.TransactionOptions._PROPAGATION + if propagation is None or propagation in propagation_options: + self.propagation = propagation + else: + raise ValueError( + "Unexpected value for propagation. Got: {}. Expected one of: " + "{}".format(propagation, propagation_options) + ) + + propagation_names = context_module.TransactionOptions._INT_TO_NAME + self.propagation_name = propagation_names.get(self.propagation) + + self.join = join + joinable_options = context_module.TransactionOptions._JOINABLE + self.joinable = propagation in joinable_options + + def _handle_nested(self): + """The NESTED propagation policy would commit all changes in the outer + and inner transactions together when the outer policy commits. However, + if an exception is thrown in the inner transaction all changes there + would get thrown out but allow the outer transaction to optionally + recover and continue. The NESTED policy is not supported. If you use + this policy, your code will throw a BadRequestError exception. + """ + raise exceptions.BadRequestError("Nested transactions are not supported.") + + def _handle_mandatory(self): + """Always propagate an existing transaction; throw an exception if + there is no existing transaction. If a function that uses this policy + throws an exception, it's probably not safe to catch the exception and + commit the outer transaction; the function may have left the outer + transaction in a bad state. + """ + if not in_transaction(): + raise exceptions.BadRequestError("Requires an existing transaction.") + + def _handle_allowed(self): + """If there is an existing transaction, propagate it. If a function + that uses this policy throws an exception, it's probably not safe to + catch the exception and commit the outer transaction; the function may + have left the outer transaction in a bad state. + """ + # no special handling needed. + pass + + def _handle_independent(self): + """Always use a new transaction, "pausing" any existing transactions. + A function that uses this policy should not return any entities read in + the new transaction, as the entities are not transactionally consistent + with the caller's transaction. + """ + if in_transaction(): + # Avoid circular import in Python 2.7 + from google.cloud.ndb import context as context_module + + context = context_module.get_context() + new_context = context.new(transaction=None) + return new_context + + def _handle_join(self): + change_to = self.joinable + if self.join != change_to: + if self.join is not None: + logging.warning( + "Modifying join behaviour to maintain old NDB behaviour. 
" + "Setting join to {} for propagation value: {} ({})".format( + change_to, self.propagation, self.propagation_name + ) + ) + self.join = change_to + + def handle_propagation(self): + """Ensure the conditions needed to maintain legacy NDB behaviour are + met. + + Returns: + Context: A new :class:`Context` instance that should be + used to run the transaction in or :data:`None` if the + transaction should run in the existing :class:`Context`. + bool: :data:`True` if the new transaction is to be joined to an + existing one otherwise :data:`False`. + """ + context = None + if self.propagation: + # ensure we use the correct joining method. + context = getattr(self, "_handle_{}".format(self.propagation_name))() + self._handle_join() + return context, self.join + + +def in_transaction(): + """Determine if there is a currently active transaction. + + Returns: + bool: :data:`True` if there is a transaction for the current context, + otherwise :data:`False`. + """ + # Avoid circular import in Python 2.7 + from google.cloud.ndb import context as context_module + + return context_module.get_context().transaction is not None + + +def transaction( + callback, + retries=_retry._DEFAULT_RETRIES, + read_only=False, + join=False, + xg=True, + propagation=None, +): + """Run a callback in a transaction. + + Args: + callback (Callable): The function or tasklet to be called. + retries (int): Number of times to potentially retry the callback in + case of transient server errors. + read_only (bool): Whether to run the transaction in read only mode. + join (bool): In the event of an already running transaction, if `join` + is `True`, `callback` will be run in the already running + transaction, otherwise an exception will be raised. Transactions + cannot be nested. + xg (bool): Enable cross-group transactions. This argument is included + for backwards compatibility reasons and is ignored. All Datastore + transactions are cross-group, up to 25 entity groups, all the time. + propagation (int): An element from :class:`ndb.TransactionOptions`. + This parameter controls what happens if you try to start a new + transaction within an existing transaction. If this argument is + provided, the `join` argument will be ignored. + """ + future = transaction_async( + callback, + retries=retries, + read_only=read_only, + join=join, + xg=xg, + propagation=propagation, + ) + return future.result() + + +def transaction_async( + callback, + retries=_retry._DEFAULT_RETRIES, + read_only=False, + join=False, + xg=True, + propagation=None, +): + new_context, join = _Propagation(propagation, join).handle_propagation() + args = (callback, retries, read_only, join, xg, None) + if new_context is None: + transaction_return_value = transaction_async_(*args) + else: + with new_context.use() as context: + transaction_return_value = transaction_async_(*args) + context.flush() + return transaction_return_value + + +def transaction_async_( + callback, + retries=_retry._DEFAULT_RETRIES, + read_only=False, + join=False, + xg=True, + propagation=None, +): + """Run a callback in a transaction. + + This is the asynchronous version of :func:`transaction`. 
+ """ + # Avoid circular import in Python 2.7 + from google.cloud.ndb import context as context_module + + if propagation is not None: + raise exceptions.NoLongerImplementedError() + + context = context_module.get_context() + if context.transaction: + if join: + result = callback() + if not isinstance(result, tasklets.Future): + future = tasklets.Future() + future.set_result(result) + result = future + return result + else: + raise NotImplementedError( + "Transactions may not be nested. Pass 'join=True' in order to " + "join an already running transaction." + ) + + tasklet = functools.partial( + _transaction_async, context, callback, read_only=read_only + ) + if retries: + tasklet = _retry.retry_async(tasklet, retries=retries) + + return tasklet() + + +@tasklets.tasklet +def _transaction_async(context, callback, read_only=False): + # Avoid circular import in Python 2.7 + from google.cloud.ndb import _datastore_api + + # Start the transaction + utils.logging_debug(log, "Start transaction") + transaction_id = yield _datastore_api.begin_transaction(read_only, retries=0) + utils.logging_debug(log, "Transaction Id: {}", transaction_id) + + on_commit_callbacks = [] + transaction_complete_callbacks = [] + tx_context = context.new( + transaction=transaction_id, + on_commit_callbacks=on_commit_callbacks, + transaction_complete_callbacks=transaction_complete_callbacks, + batches=None, + commit_batches=None, + cache=None, + # We could just pass `None` here and let the `Context` constructor + # instantiate a new event loop, but our unit tests inject a subclass of + # `EventLoop` that makes testing a little easier. This makes sure the + # new event loop is of the same type as the current one, to propagate + # the event loop class used for testing. + eventloop=type(context.eventloop)(), + retry=context.get_retry_state(), + ) + + # The outer loop is dependent on the inner loop + def run_inner_loop(inner_context): + with inner_context.use(): + if inner_context.eventloop.run1(): + return True # schedule again + + context.eventloop.add_idle(run_inner_loop, tx_context) + + with tx_context.use(): + try: + try: + # Run the callback + result = callback() + if isinstance(result, tasklets.Future): + result = yield result + + # Make sure we've run everything we can run before calling commit + _datastore_api.prepare_to_commit(transaction_id) + tx_context.eventloop.run() + + # Commit the transaction + yield _datastore_api.commit(transaction_id, retries=0) + + # Rollback if there is an error + except Exception as e: # noqa: E722 + tx_context.cache.clear() + yield _datastore_api.rollback(transaction_id) + raise e + + for callback in on_commit_callbacks: + callback() + + finally: + for callback in transaction_complete_callbacks: + callback() + + raise tasklets.Return(result) + + +def transactional( + retries=_retry._DEFAULT_RETRIES, + read_only=False, + join=True, + xg=True, + propagation=None, +): + """A decorator to run a function automatically in a transaction. + + Usage example: + + @transactional(retries=1, read_only=False) + def callback(args): + ... + + Unlike func:`transaction`_, the ``join`` argument defaults to ``True``, + making functions decorated with func:`transactional`_ composable, by + default. IE, a function decorated with ``transactional`` can call another + function decorated with ``transactional`` and the second function will be + executed in the already running transaction. + + See google.cloud.ndb.transaction for available options. 
+ """ + + def transactional_wrapper(wrapped): + @functools.wraps(wrapped) + def transactional_inner_wrapper(*args, **kwargs): + def callback(): + return wrapped(*args, **kwargs) + + return transaction( + callback, + retries=retries, + read_only=read_only, + join=join, + xg=xg, + propagation=propagation, + ) + + return transactional_inner_wrapper + + return transactional_wrapper + + +def transactional_async( + retries=_retry._DEFAULT_RETRIES, + read_only=False, + join=True, + xg=True, + propagation=None, +): + """A decorator to run a function in an async transaction. + + Usage example: + + @transactional_async(retries=1, read_only=False) + def callback(args): + ... + + Unlike func:`transaction`_, the ``join`` argument defaults to ``True``, + making functions decorated with func:`transactional`_ composable, by + default. IE, a function decorated with ``transactional_async`` can call + another function decorated with ``transactional_async`` and the second + function will be executed in the already running transaction. + + See google.cloud.ndb.transaction above for available options. + """ + + def transactional_async_wrapper(wrapped): + @functools.wraps(wrapped) + def transactional_async_inner_wrapper(*args, **kwargs): + def callback(): + return wrapped(*args, **kwargs) + + return transaction_async( + callback, + retries=retries, + read_only=read_only, + join=join, + xg=xg, + propagation=propagation, + ) + + return transactional_async_inner_wrapper + + return transactional_async_wrapper + + +def transactional_tasklet( + retries=_retry._DEFAULT_RETRIES, + read_only=False, + join=True, + xg=True, + propagation=None, +): + """A decorator that turns a function into a tasklet running in transaction. + + Wrapped function returns a Future. + + Unlike func:`transaction`_, the ``join`` argument defaults to ``True``, + making functions decorated with func:`transactional`_ composable, by + default. IE, a function decorated with ``transactional_tasklet`` can call + another function decorated with ``transactional_tasklet`` and the second + function will be executed in the already running transaction. + + See google.cloud.ndb.transaction above for available options. + """ + + def transactional_tasklet_wrapper(wrapped): + @functools.wraps(wrapped) + def transactional_tasklet_inner_wrapper(*args, **kwargs): + def callback(): + tasklet = tasklets.tasklet(wrapped) + return tasklet(*args, **kwargs) + + return transaction_async( + callback, + retries=retries, + read_only=read_only, + join=join, + xg=xg, + propagation=propagation, + ) + + return transactional_tasklet_inner_wrapper + + return transactional_tasklet_wrapper + + +def non_transactional(allow_existing=True): + """A decorator that ensures a function is run outside a transaction. + + If there is an existing transaction (and allow_existing=True), the existing + transaction is paused while the function is executed. + + Args: + allow_existing: If false, an exception will be thrown when called from + within a transaction. If true, a new non-transactional context will + be created for running the function; the original transactional + context will be saved and then restored after the function is + executed. Defaults to True. 
+ """ + + def non_transactional_wrapper(wrapped): + @functools.wraps(wrapped) + def non_transactional_inner_wrapper(*args, **kwargs): + # Avoid circular import in Python 2.7 + from google.cloud.ndb import context as context_module + + context = context_module.get_context() + if not context.in_transaction(): + return wrapped(*args, **kwargs) + if not allow_existing: + raise exceptions.BadRequestError( + "{} cannot be called within a transaction".format(wrapped.__name__) + ) + new_context = context.new(transaction=None) + with new_context.use(): + return wrapped(*args, **kwargs) + + return non_transactional_inner_wrapper + + return non_transactional_wrapper diff --git a/packages/google-cloud-ndb/google/cloud/ndb/blobstore.py b/packages/google-cloud-ndb/google/cloud/ndb/blobstore.py new file mode 100644 index 000000000000..e2dc50280417 --- /dev/null +++ b/packages/google-cloud-ndb/google/cloud/ndb/blobstore.py @@ -0,0 +1,175 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Provides an ``ndb`` interface for the blob store. + +Initially, the blob store was an App Engine specific API for Google Cloud +Storage. + +No longer supported. +""" + + +from google.cloud.ndb import _datastore_types +from google.cloud.ndb import model +from google.cloud.ndb import exceptions + + +__all__ = [ + "BLOB_INFO_KIND", + "BLOB_KEY_HEADER", + "BLOB_MIGRATION_KIND", + "BLOB_RANGE_HEADER", + "BlobFetchSizeTooLargeError", + "BlobInfo", + "BlobInfoParseError", + "BlobKey", + "BlobKeyProperty", + "BlobNotFoundError", + "BlobReader", + "create_upload_url", + "create_upload_url_async", + "DataIndexOutOfRangeError", + "delete", + "delete_async", + "delete_multi", + "delete_multi_async", + "Error", + "fetch_data", + "fetch_data_async", + "get", + "get_async", + "get_multi", + "get_multi_async", + "InternalError", + "MAX_BLOB_FETCH_SIZE", + "parse_blob_info", + "PermissionDeniedError", + "UPLOAD_INFO_CREATION_HEADER", +] + + +BlobKey = _datastore_types.BlobKey + +BLOB_INFO_KIND = "__BlobInfo__" +BLOB_MIGRATION_KIND = "__BlobMigration__" +BLOB_KEY_HEADER = "X-AppEngine-BlobKey" +BLOB_RANGE_HEADER = "X-AppEngine-BlobRange" +MAX_BLOB_FETCH_SIZE = 1015808 +UPLOAD_INFO_CREATION_HEADER = "X-AppEngine-Upload-Creation" + +BlobKeyProperty = model.BlobKeyProperty + + +class BlobFetchSizeTooLargeError(object): + def __init__(self, *args, **kwargs): + raise exceptions.NoLongerImplementedError() + + +class BlobInfo(object): + def __init__(self, *args, **kwargs): + raise exceptions.NoLongerImplementedError() + + @classmethod + def get(cls, *args, **kwargs): + raise exceptions.NoLongerImplementedError() + + @classmethod + def get_async(cls, *args, **kwargs): + raise exceptions.NoLongerImplementedError() + + @classmethod + def get_multi(cls, *args, **kwargs): + raise exceptions.NoLongerImplementedError() + + @classmethod + def get_multi_async(cls, *args, **kwargs): + raise exceptions.NoLongerImplementedError() + + +class BlobInfoParseError(object): + def __init__(self, *args, **kwargs): + raise 
exceptions.NoLongerImplementedError() + + +class BlobNotFoundError(object): + def __init__(self, *args, **kwargs): + raise exceptions.NoLongerImplementedError() + + +class BlobReader(object): + def __init__(self, *args, **kwargs): + raise exceptions.NoLongerImplementedError() + + +def create_upload_url(*args, **kwargs): + raise exceptions.NoLongerImplementedError() + + +def create_upload_url_async(*args, **kwargs): + raise exceptions.NoLongerImplementedError() + + +class DataIndexOutOfRangeError(object): + def __init__(self, *args, **kwargs): + raise exceptions.NoLongerImplementedError() + + +def delete(*args, **kwargs): + raise exceptions.NoLongerImplementedError() + + +def delete_async(*args, **kwargs): + raise exceptions.NoLongerImplementedError() + + +def delete_multi(*args, **kwargs): + raise exceptions.NoLongerImplementedError() + + +def delete_multi_async(*args, **kwargs): + raise exceptions.NoLongerImplementedError() + + +class Error(object): + def __init__(self, *args, **kwargs): + raise exceptions.NoLongerImplementedError() + + +def fetch_data(*args, **kwargs): + raise exceptions.NoLongerImplementedError() + + +def fetch_data_async(*args, **kwargs): + raise exceptions.NoLongerImplementedError() + + +get = BlobInfo.get +get_async = BlobInfo.get_async +get_multi = BlobInfo.get_multi +get_multi_async = BlobInfo.get_multi_async + + +class InternalError(object): + def __init__(self, *args, **kwargs): + raise exceptions.NoLongerImplementedError() + + +def parse_blob_info(*args, **kwargs): + raise exceptions.NoLongerImplementedError() + + +class PermissionDeniedError(object): + def __init__(self, *args, **kwargs): + raise exceptions.NoLongerImplementedError() diff --git a/packages/google-cloud-ndb/google/cloud/ndb/client.py b/packages/google-cloud-ndb/google/cloud/ndb/client.py new file mode 100644 index 000000000000..8c2ae57860f6 --- /dev/null +++ b/packages/google-cloud-ndb/google/cloud/ndb/client.py @@ -0,0 +1,256 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
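A minimal sketch of how the blobstore stubs above behave; every entry point raises, so the only thing a caller can do is catch the error:

    from google.cloud.ndb import blobstore, exceptions

    try:
        blobstore.fetch_data(b"blob-key", 0, 100)  # arguments are ignored
    except exceptions.NoLongerImplementedError:
        pass  # always raised: the blobstore API is no longer supported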
+ +"""A client for NDB which manages credentials, project, namespace, and database.""" + +import contextlib +import grpc +import os +import requests + +import google.api_core.client_options + +from google.api_core.gapic_v1 import client_info +from google.cloud import environment_vars +from google.cloud import _helpers +from google.cloud import client as google_client +from google.cloud.datastore_v1.services.datastore.transports import ( + grpc as datastore_grpc, +) + +from google.cloud.ndb import __version__ +from google.cloud.ndb import context as context_module +from google.cloud.ndb import key as key_module + + +_CLIENT_INFO = client_info.ClientInfo( + user_agent="google-cloud-ndb/{}".format(__version__) +) + +DATASTORE_API_HOST = "datastore.googleapis.com" + + +def _get_gcd_project(): + """Gets the GCD application ID if it can be inferred.""" + return os.getenv(environment_vars.GCD_DATASET) + + +def _determine_default_project(project=None): + """Determine default project explicitly or implicitly as fall-back. + + In implicit case, supports four environments. In order of precedence, the + implicit environments are: + + * DATASTORE_DATASET environment variable (for ``gcd`` / emulator testing) + * GOOGLE_CLOUD_PROJECT environment variable + * Google App Engine application ID + * Google Compute Engine project ID (from metadata server) + _ + Arguments: + project (Optional[str]): The project to use as default. + + Returns: + Union([str, None]): Default project if it can be determined. + """ + if project is None: + project = _get_gcd_project() + + if project is None: + project = _helpers._determine_default_project(project=project) + + return project + + +class Client(google_client.ClientWithProject): + """An NDB client. + + The NDB client must be created in order to use NDB, and any use of NDB must + be within the context of a call to :meth:`context`. + + The Datastore Emulator is used for the client if and only if the + DATASTORE_EMULATOR_HOST environment variable is set. + + Arguments: + project (Optional[str]): The project to pass to proxied API methods. If + not passed, falls back to the default inferred from the + environment. + namespace (Optional[str]): Namespace to pass to proxied API methods. + credentials (Optional[:class:`~google.auth.credentials.Credentials`]): + The OAuth2 Credentials to use for this client. If not passed, falls + back to the default inferred from the environment. + client_options (Optional[:class:`~google.api_core.client_options.ClientOptions` or :class:`dict`]) + Client options used to set user options on the client. + API Endpoint should be set through client_options. + database (Optional[str]): Database to access. Defaults to the (default) database. 
+ """ + + SCOPE = ("https://www.googleapis.com/auth/datastore",) + """The scopes required for authenticating as a Cloud Datastore consumer.""" + + def __init__( + self, + project=None, + namespace=None, + credentials=None, + client_options=None, + database=None, + ): + self.namespace = namespace + self.host = os.environ.get(environment_vars.GCD_HOST, DATASTORE_API_HOST) + self.client_info = _CLIENT_INFO + self._client_options = client_options + self.database = database + + # Use insecure connection when using Datastore Emulator, otherwise + # use secure connection + emulator = bool(os.environ.get(environment_vars.GCD_HOST)) + self.secure = not emulator + + # Use Datastore API host from client_options if provided, otherwise use default + api_endpoint = DATASTORE_API_HOST + if client_options is not None: + if isinstance(client_options, dict): + client_options = google.api_core.client_options.from_dict( + client_options + ) + if client_options.api_endpoint: + api_endpoint = client_options.api_endpoint + + self.host = os.environ.get(environment_vars.GCD_HOST, api_endpoint) + + if emulator: + # When using the emulator, in theory, the client shouldn't need to + # call home to authenticate, as you don't need to authenticate to + # use the local emulator. Unfortunately, the client calls home to + # authenticate anyway, unless you pass ``requests.Session`` to + # ``_http`` which seems to be the preferred work around. + super(Client, self).__init__( + project=project, + credentials=credentials, + client_options=client_options, + _http=requests.Session, + ) + else: + super(Client, self).__init__( + project=project, credentials=credentials, client_options=client_options + ) + + if emulator: + channel = grpc.insecure_channel( + self.host, + options=[ + # Default options provided in DatastoreGrpcTransport, but not when we override the channel. + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + else: + user_agent = self.client_info.to_user_agent() + channel = _helpers.make_secure_channel( + self._credentials, user_agent, self.host + ) + self.stub = datastore_grpc.DatastoreGrpcTransport( + host=self.host, + credentials=credentials, + client_info=self.client_info, + channel=channel, + ) + + @contextlib.contextmanager + def context( + self, + namespace=key_module.UNDEFINED, + cache_policy=None, + global_cache=None, + global_cache_policy=None, + global_cache_timeout_policy=None, + legacy_data=True, + ): + """Establish a context for a set of NDB calls. + + This method provides a context manager which establishes the runtime + state for using NDB. + + For example: + + .. code-block:: python + + from google.cloud import ndb + + client = ndb.Client() + with client.context(): + # Use NDB for some stuff + pass + + Use of a context is required--NDB can only be used inside a running + context. The context is used to manage the connection to Google Cloud + Datastore, an event loop for asynchronous API calls, runtime caching + policy, and other essential runtime state. + + Code within an asynchronous context should be single threaded. + Internally, a :class:`threading.local` instance is used to track the + current event loop. + + In a web application, it is recommended that a single context be used + per HTTP request. This can typically be accomplished in a middleware + layer. + + Arguments: + cache_policy (Optional[Callable[[key.Key], bool]]): The + cache policy to use in this context. See: + :meth:`~google.cloud.ndb.context.Context.set_cache_policy`. 
+            global_cache (Optional[global_cache.GlobalCache]):
+                The global cache for this context. See:
+                :class:`~google.cloud.ndb.global_cache.GlobalCache`.
+            global_cache_policy (Optional[Callable[[key.Key], bool]]): The
+                global cache policy to use in this context. See:
+                :meth:`~google.cloud.ndb.context.Context.set_global_cache_policy`.
+            global_cache_timeout_policy (Optional[Callable[[key.Key], int]]):
+                The global cache timeout to use in this context. See:
+                :meth:`~google.cloud.ndb.context.Context.set_global_cache_timeout_policy`.
+            legacy_data (bool): Set to ``True`` (the default) to write data in
+                a way that can be read by the legacy version of NDB.
+        """
+        context = context_module.get_context(False)
+        if context is not None:
+            raise RuntimeError("Context is already created for this thread.")
+
+        context = context_module.Context(
+            self,
+            namespace=namespace,
+            cache_policy=cache_policy,
+            global_cache=global_cache,
+            global_cache_policy=global_cache_policy,
+            global_cache_timeout_policy=global_cache_timeout_policy,
+            legacy_data=legacy_data,
+        )
+        with context.use():
+            yield context
+
+            # Finish up any work left to do on the event loop
+            context.eventloop.run()
+
+    @property
+    def _http(self):
+        """Getter for the object used for HTTP transport.
+
+        Raises:
+            NotImplementedError: Always; HTTP transport is not supported.
+        """
+        raise NotImplementedError("HTTP transport is not supported.")
+
+    @staticmethod
+    def _determine_default(project):
+        """Helper: override default project detection."""
+        return _determine_default_project(project)
diff --git a/packages/google-cloud-ndb/google/cloud/ndb/context.py b/packages/google-cloud-ndb/google/cloud/ndb/context.py
new file mode 100644
index 000000000000..d8c47f523449
--- /dev/null
+++ b/packages/google-cloud-ndb/google/cloud/ndb/context.py
@@ -0,0 +1,701 @@
+# -*- coding: utf-8 -*-
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Context for currently running tasks and transactions."""
+
+import collections
+import contextlib
+import contextvars
+import itertools
+import os
+import threading
+import uuid
+
+from google.cloud.ndb import _eventloop
+from google.cloud.ndb import exceptions
+from google.cloud.ndb import key as key_module
+
+
+class _ContextIds:
+    """Iterator which generates a sequence of context ids.
+
+    Useful for debugging complicated interactions among concurrent processes and
+    threads.
+
+    Each value in the sequence is a string that includes the machine's "node",
+    acquired via `uuid.getnode()`, the current process id, and a sequence number
+    which increases monotonically starting from one in each process. The
+    combination of all three is sufficient to uniquely identify the context in
+    which a particular piece of code is being run. Each context, as it is created,
+    is assigned the next id in this sequence. The context id is used by
+    `utils.logging_debug` to grant insight into where a debug logging statement is
+    coming from in a cloud environment.
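+
+    For example, on a machine whose `uuid.getnode()` value is ``123456789``,
+    running as process ``4321``, the third context created in that process would
+    be assigned the id ``123456789-4321-3`` (illustrative values).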
+    """
+
+    def __init__(self):
+        self.prefix = "{}-{}-".format(uuid.getnode(), os.getpid())
+        self.counter = itertools.count(1)
+        self.lock = threading.Lock()
+
+    def __next__(self):
+        with self.lock:
+            sequence_number = next(self.counter)
+
+        return self.prefix + str(sequence_number)
+
+    next = __next__  # Python 2.7
+
+
+_context_ids = _ContextIds()
+
+
+class _LocalState:
+    """Thread local state."""
+
+    def __init__(self):
+        self._toplevel_context = contextvars.ContextVar(
+            "_toplevel_context", default=None
+        )
+        self._context = contextvars.ContextVar("_context", default=None)
+
+    @property
+    def context(self):
+        return self._context.get()
+
+    @context.setter
+    def context(self, value):
+        self._context.set(value)
+
+    @property
+    def toplevel_context(self):
+        return self._toplevel_context.get()
+
+    @toplevel_context.setter
+    def toplevel_context(self, value):
+        self._toplevel_context.set(value)
+
+
+_state = _LocalState()
+
+
+def get_context(raise_context_error=True):
+    """Get the current context.
+
+    This function should be called within a context established by
+    :meth:`google.cloud.ndb.client.Client.context`.
+
+    Args:
+        raise_context_error (bool): If set to :data:`True`, will raise an
+            exception if called outside of a context. Set this to :data:`False`
+            in order to have it just return :data:`None` if called outside of a
+            context. Default: :data:`True`
+
+    Returns:
+        Context: The current context.
+
+    Raises:
+        exceptions.ContextError: If called outside of a context
+            established by :meth:`google.cloud.ndb.client.Client.context` and
+            ``raise_context_error`` is :data:`True`.
+    """
+    context = _state.context
+    if context:
+        return context
+
+    if raise_context_error:
+        raise exceptions.ContextError()
+
+
+def get_toplevel_context(raise_context_error=True):
+    """Get the current top level context.
+
+    This function should be called within a context established by
+    :meth:`google.cloud.ndb.client.Client.context`.
+
+    The toplevel context is the context created by the call to
+    :meth:`google.cloud.ndb.client.Client.context`. At times, this context will
+    be superseded by subcontexts, which are used, for example, during
+    transactions. This function will always return the top level context
+    regardless of whether one of these subcontexts is the current one.
+
+    Args:
+        raise_context_error (bool): If set to :data:`True`, will raise an
+            exception if called outside of a context. Set this to :data:`False`
+            in order to have it just return :data:`None` if called outside of a
+            context. Default: :data:`True`
+
+    Returns:
+        Context: The current context.
+
+    Raises:
+        exceptions.ContextError: If called outside of a context
+            established by :meth:`google.cloud.ndb.client.Client.context` and
+            ``raise_context_error`` is :data:`True`.
+    """
+    context = _state.toplevel_context
+    if context:
+        return context
+
+    if raise_context_error:
+        raise exceptions.ContextError()
+
+
+def _default_policy(attr_name, value_type):
+    """Factory for producing default policies.
+
+    Born of the observation that all default policies are more or less the
+    same: they defer to some attribute on the model class for the key's kind
+    and expect the value to be either of a particular type or a callable.
+
+    Returns:
+        Callable[[key], value_type]: A policy function suitable for use as a
+            default policy.
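+
+    For example, ``_default_policy("_use_cache", bool)`` produces the default
+    context cache policy: given a key, it looks up the model class registered
+    for the key's kind and returns that class's ``_use_cache`` attribute,
+    calling it with the key first if it is callable rather than a ``bool``.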
+ """ + # avoid circular imports on Python 2.7 + from google.cloud.ndb import model + + def policy(key): + value = None + if key is not None: + kind = key.kind + if callable(kind): + kind = kind() + modelclass = model.Model._kind_map.get(kind) + if modelclass is not None: + policy = getattr(modelclass, attr_name, None) + if policy is not None: + if isinstance(policy, value_type): + value = policy + else: + value = policy(key) + + return value + + return policy + + +_default_cache_policy = _default_policy("_use_cache", bool) +"""The default cache policy. + +Defers to ``_use_cache`` on the Model class for the key's kind. + +See: :meth:`~google.cloud.ndb.context.Context.set_cache_policy` +""" + +_default_global_cache_policy = _default_policy("_use_global_cache", bool) +"""The default global cache policy. + +Defers to ``_use_global_cache`` on the Model class for the key's kind. + +See: :meth:`~google.cloud.ndb.context.Context.set_global_cache_policy` +""" + +_default_global_cache_timeout_policy = _default_policy("_global_cache_timeout", int) +"""The default global cache timeout policy. + +Defers to ``_global_cache_timeout`` on the Model class for the key's kind. + +See: :meth:`~google.cloud.ndb.context.Context.set_global_cache_timeout_policy` +""" + +_default_datastore_policy = _default_policy("_use_datastore", bool) +"""The default datastore policy. + +Defers to ``_use_datastore`` on the Model class for the key's kind. + +See: :meth:`~google.cloud.ndb.context.Context.set_datastore_policy` +""" + + +_ContextTuple = collections.namedtuple( + "_ContextTuple", + [ + "id", + "client", + "namespace", + "eventloop", + "batches", + "commit_batches", + "transaction", + "cache", + "global_cache", + "on_commit_callbacks", + "transaction_complete_callbacks", + "legacy_data", + ], +) + + +class _Context(_ContextTuple): + """Current runtime state. + + Instances of this class hold on to runtime state such as the current event + loop, current transaction, etc. Instances are shallowly immutable, but + contain references to data structures which are mutable, such as the event + loop. A new context can be derived from an existing context using + :meth:`new`. + + :class:`Context` is a subclass of :class:`_Context` which provides only + publicly facing interface. The use of two classes is only to provide a + distinction between public and private API. + + Arguments: + client (client.Client): The NDB client for this context. + """ + + def __new__( + cls, + client, + id=None, + namespace=key_module.UNDEFINED, + eventloop=None, + batches=None, + commit_batches=None, + transaction=None, + cache=None, + cache_policy=None, + global_cache=None, + global_cache_policy=None, + global_cache_timeout_policy=None, + datastore_policy=None, + on_commit_callbacks=None, + transaction_complete_callbacks=None, + legacy_data=True, + retry=None, + rpc_time=None, + wait_time=None, + ): + # Prevent circular import in Python 2.7 + from google.cloud.ndb import _cache + + if id is None: + id = next(_context_ids) + + if eventloop is None: + eventloop = _eventloop.EventLoop() + + if batches is None: + batches = {} + + if commit_batches is None: + commit_batches = {} + + # Create a cache and, if an existing cache was passed into this + # method, duplicate its entries. 
+ new_cache = _cache.ContextCache() + if cache: + new_cache.update(cache) + + context = super(_Context, cls).__new__( + cls, + id=id, + client=client, + namespace=namespace, + eventloop=eventloop, + batches=batches, + commit_batches=commit_batches, + transaction=transaction, + cache=new_cache, + global_cache=global_cache, + on_commit_callbacks=on_commit_callbacks, + transaction_complete_callbacks=transaction_complete_callbacks, + legacy_data=legacy_data, + ) + + context.set_cache_policy(cache_policy) + context.set_global_cache_policy(global_cache_policy) + context.set_global_cache_timeout_policy(global_cache_timeout_policy) + context.set_datastore_policy(datastore_policy) + context.set_retry_state(retry) + + return context + + def new(self, **kwargs): + """Create a new :class:`_Context` instance. + + New context will be the same as context except values from ``kwargs`` + will be substituted. + """ + fields = self._fields + tuple(self.__dict__.keys()) + state = { + name: getattr(self, name) for name in fields if not name.startswith("_") + } + state.update(kwargs) + return type(self)(**state) + + @contextlib.contextmanager + def use(self): + """Use this context as the current context. + + This method returns a context manager for use with the ``with`` + statement. Code inside the ``with`` context will see this context as + the current context. + """ + prev_context = _state.context + _state.context = self + if not prev_context: + _state.toplevel_context = self + self.rpc_time = 0 + self.wait_time = 0 + try: + yield self + finally: + if prev_context: + prev_context.cache.update(self.cache) + else: + _state.toplevel_context = None + _state.context = prev_context + + def _use_cache(self, key, options=None): + """Return whether to use the context cache for this key.""" + flag = options.use_cache if options else None + if flag is None: + flag = self.cache_policy(key) + if flag is None: + flag = True + return flag + + def _use_global_cache(self, key, options=None): + """Return whether to use the global cache for this key.""" + if self.global_cache is None: + return False + + flag = options.use_global_cache if options else None + if flag is None: + flag = self.global_cache_policy(key) + if flag is None: + flag = True + return flag + + def _global_cache_timeout(self, key, options): + """Return global cache timeout (expiration) for this key.""" + timeout = None + if options: + timeout = options.global_cache_timeout + if timeout is None: + timeout = self.global_cache_timeout_policy(key) + return timeout + + def _use_datastore(self, key, options=None): + """Return whether to use the Datastore for this key.""" + flag = options.use_datastore if options else None + if flag is None: + flag = self.datastore_policy(key) + if flag is None: + flag = True + return flag + + +class Context(_Context): + """User management of cache and other policy.""" + + def clear_cache(self): + """Clears the in-memory cache. + + This does not affect global cache. + """ + self.cache.clear() + + def flush(self): + """Force any pending batch operations to go ahead and run.""" + self.eventloop.run() + + def get_namespace(self): + """Return the current context namespace. + + If `namespace` isn't set on the context, the client's namespace will be + returned. + + Returns: + str: The namespace, or `None`. + """ + if self.namespace is key_module.UNDEFINED: + return self.client.namespace + + return self.namespace + + def get_cache_policy(self): + """Return the current context cache policy function. 
+ + Returns: + Callable: A function that accepts a + :class:`~google.cloud.ndb.key.Key` instance as a single + positional argument and returns a ``bool`` indicating if it + should be cached. May be :data:`None`. + """ + return self.cache_policy + + def get_datastore_policy(self): + """Return the current context datastore policy function. + + Returns: + Callable: A function that accepts a + :class:`~google.cloud.ndb.key.Key` instance as a single + positional argument and returns a ``bool`` indicating if it + should use the datastore. May be :data:`None`. + """ + raise NotImplementedError + + def get_global_cache_policy(self): + """Return the current global cache policy function. + + Returns: + Callable: A function that accepts a + :class:`~google.cloud.ndb.key.Key` instance as a single + positional argument and returns a ``bool`` indicating if it + should be cached. May be :data:`None`. + """ + return self.global_cache_policy + + get_memcache_policy = get_global_cache_policy # backwards compatibility + + def get_global_cache_timeout_policy(self): + """Return the current policy function global cache timeout (expiration). + + Returns: + Callable: A function that accepts a + :class:`~google.cloud.ndb.key.Key` instance as a single + positional argument and returns an ``int`` indicating the + timeout, in seconds, for the key. ``0`` implies the default + timeout. May be :data:`None`. + """ + return self.global_cache_timeout_policy + + get_memcache_timeout_policy = get_global_cache_timeout_policy + + def set_cache_policy(self, policy): + """Set the context cache policy function. + + Args: + policy (Callable): A function that accepts a + :class:`~google.cloud.ndb.key.Key` instance as a single + positional argument and returns a ``bool`` indicating if it + should be cached. May be :data:`None`. + """ + if policy is None: + policy = _default_cache_policy + + elif isinstance(policy, bool): + flag = policy + + def policy(key): + return flag + + self.cache_policy = policy + + def set_datastore_policy(self, policy): + """Set the context datastore policy function. + + Args: + policy (Callable): A function that accepts a + :class:`~google.cloud.ndb.key.Key` instance as a single + positional argument and returns a ``bool`` indicating if it + should use the datastore. May be :data:`None`. + """ + if policy is None: + policy = _default_datastore_policy + + elif isinstance(policy, bool): + flag = policy + + def policy(key): + return flag + + self.datastore_policy = policy + + def set_global_cache_policy(self, policy): + """Set the global cache policy function. + + Args: + policy (Callable): A function that accepts a + :class:`~google.cloud.ndb.key.Key` instance as a single + positional argument and returns a ``bool`` indicating if it + should be cached. May be :data:`None`. + """ + if policy is None: + policy = _default_global_cache_policy + + elif isinstance(policy, bool): + flag = policy + + def policy(key): + return flag + + self.global_cache_policy = policy + + set_memcache_policy = set_global_cache_policy # backwards compatibility + + def set_global_cache_timeout_policy(self, policy): + """Set the policy function for global cache timeout (expiration). + + Args: + policy (Callable): A function that accepts a + :class:`~google.cloud.ndb.key.Key` instance as a single + positional argument and returns an ``int`` indicating the + timeout, in seconds, for the key. ``0`` implies the default + timeout. May be :data:`None`. 
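+
+                As a convenience, a plain ``int`` may also be passed; it is
+                wrapped in a policy function that returns that constant timeout
+                for every key. For example,
+                ``context.set_global_cache_timeout_policy(600)`` gives every
+                cached entity a ten minute expiration.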
+ """ + if policy is None: + policy = _default_global_cache_timeout_policy + + elif isinstance(policy, int): + timeout = policy + + def policy(key): + return timeout + + self.global_cache_timeout_policy = policy + + set_memcache_timeout_policy = set_global_cache_timeout_policy + + def get_retry_state(self): + return self._retry + + def set_retry_state(self, state): + self._retry = state + + def clear_retry_state(self): + self._retry = None + + def call_on_commit(self, callback): + """Call a callback upon successful commit of a transaction. + + If not in a transaction, the callback is called immediately. + + In a transaction, multiple callbacks may be registered and will be + called once the transaction commits, in the order in which they + were registered. If the transaction fails, the callbacks will not + be called. + + If the callback raises an exception, it bubbles up normally. This + means: If the callback is called immediately, any exception it + raises will bubble up immediately. If the call is postponed until + commit, remaining callbacks will be skipped and the exception will + bubble up through the transaction() call. (However, the + transaction is already committed at that point.) + + Args: + callback (Callable): The callback function. + """ + if self.in_transaction(): + self.on_commit_callbacks.append(callback) + else: + callback() + + def call_on_transaction_complete(self, callback): + """Call a callback upon completion of a transaction. + + If not in a transaction, the callback is called immediately. + + In a transaction, multiple callbacks may be registered and will be called once + the transaction completes, in the order in which they were registered. Callbacks + are called regardless of whether transaction is committed or rolled back. + + If the callback raises an exception, it bubbles up normally. This means: If the + callback is called immediately, any exception it raises will bubble up + immediately. If the call is postponed until commit, remaining callbacks will be + skipped and the exception will bubble up through the transaction() call. + (However, the transaction is already committed or rolled back at that point.) + + Args: + callback (Callable): The callback function. + """ + if self.in_transaction(): + self.transaction_complete_callbacks.append(callback) + else: + callback() + + def in_transaction(self): + """Get whether a transaction is currently active. + + Returns: + bool: :data:`True` if currently in a transaction, otherwise + :data:`False`. + """ + return self.transaction is not None + + def in_retry(self): + """Get whether we are already in a retry block. + + Returns: + bool: :data:`True` if currently in a retry block, otherwise + :data:`False`. + """ + return self._retry is not None + + def memcache_add(self, *args, **kwargs): + """Direct pass-through to memcache client. No longer implemented.""" + raise exceptions.NoLongerImplementedError() + + def memcache_cas(self, *args, **kwargs): + """Direct pass-through to memcache client. No longer implemented.""" + raise exceptions.NoLongerImplementedError() + + def memcache_decr(self, *args, **kwargs): + """Direct pass-through to memcache client. No longer implemented.""" + raise exceptions.NoLongerImplementedError() + + def memcache_delete(self, *args, **kwargs): + """Direct pass-through to memcache client. No longer implemented.""" + raise exceptions.NoLongerImplementedError() + + def memcache_get(self, *args, **kwargs): + """Direct pass-through to memcache client. 
No longer implemented.""" + raise exceptions.NoLongerImplementedError() + + def memcache_gets(self, *args, **kwargs): + """Direct pass-through to memcache client. No longer implemented.""" + raise exceptions.NoLongerImplementedError() + + def memcache_incr(self, *args, **kwargs): + """Direct pass-through to memcache client. No longer implemented.""" + raise exceptions.NoLongerImplementedError() + + def memcache_replace(self, *args, **kwargs): + """Direct pass-through to memcache client. No longer implemented.""" + raise exceptions.NoLongerImplementedError() + + def memcache_set(self, *args, **kwargs): + """Direct pass-through to memcache client. No longer implemented.""" + raise exceptions.NoLongerImplementedError() + + def urlfetch(self, *args, **kwargs): + """Fetch a resource using HTTP. No longer implemented.""" + raise exceptions.NoLongerImplementedError() + + +class ContextOptions(object): + def __init__(self, *args, **kwargs): + raise exceptions.NoLongerImplementedError() + + +class TransactionOptions(object): + NESTED = 1 # join=False + MANDATORY = 2 # join=True + ALLOWED = 3 # join=True + INDEPENDENT = 4 # join=False + + _PROPAGATION = frozenset((NESTED, MANDATORY, ALLOWED, INDEPENDENT)) + _JOINABLE = frozenset((MANDATORY, ALLOWED)) + _INT_TO_NAME = { + NESTED: "nested", + MANDATORY: "mandatory", + ALLOWED: "allowed", + INDEPENDENT: "independent", + } + + +class AutoBatcher(object): + def __init__(self, *args, **kwargs): + raise exceptions.NoLongerImplementedError() diff --git a/packages/google-cloud-ndb/google/cloud/ndb/django_middleware.py b/packages/google-cloud-ndb/google/cloud/ndb/django_middleware.py new file mode 100644 index 000000000000..361c2a00751d --- /dev/null +++ b/packages/google-cloud-ndb/google/cloud/ndb/django_middleware.py @@ -0,0 +1,29 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Django middleware for ``ndb``. + +This class is not implemented and is no longer necessary. + +To use Django middleware with NDB, follow the steps in +https://cloud.google.com/appengine/docs/standard/python3/migrating-to-cloud-ndb#using_a_runtime_context_with_django +""" + + +__all__ = ["NdbDjangoMiddleware"] + + +class NdbDjangoMiddleware(object): + def __init__(self, *args, **kwargs): + raise NotImplementedError diff --git a/packages/google-cloud-ndb/google/cloud/ndb/exceptions.py b/packages/google-cloud-ndb/google/cloud/ndb/exceptions.py new file mode 100644 index 000000000000..6c4b726292d3 --- /dev/null +++ b/packages/google-cloud-ndb/google/cloud/ndb/exceptions.py @@ -0,0 +1,131 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Classes representing legacy Google App Engine exceptions.
+
+Unless otherwise noted, these are meant to act as shims for the exception
+types defined in the ``google.appengine.api.datastore_errors`` module in
+the legacy Google App Engine runtime.
+"""
+
+
+__all__ = [
+    "Error",
+    "ContextError",
+    "BadValueError",
+    "BadArgumentError",
+    "BadRequestError",
+    "Rollback",
+    "BadQueryError",
+    "BadFilterError",
+]
+
+
+class Error(Exception):
+    """Base datastore error type."""
+
+
+class ContextError(Error):
+    """Indicates an NDB call was made without a context.
+
+    Raised whenever an NDB call is made outside of a context
+    established by :meth:`google.cloud.ndb.client.Client.context`.
+    """
+
+    def __init__(self):
+        super(ContextError, self).__init__(
+            "No current context. NDB calls must be made in context "
+            "established by google.cloud.ndb.Client.context."
+        )
+
+
+class BadValueError(Error):
+    """Indicates a property value or filter value is invalid.
+
+    Raised by ``Entity.__setitem__()``, ``Query.__setitem__()``, ``Get()``,
+    and others.
+    """
+
+
+class BadArgumentError(Error):
+    """Indicates an invalid argument was passed.
+
+    Raised by ``Query.Order()``, ``Iterator.Next()``, and others.
+    """
+
+
+class BadRequestError(Error):
+    """Indicates a bad request was passed.
+
+    Raised by ``Model.non_transactional()`` and others.
+    """
+
+
+class Rollback(Error):
+    """Allows a transaction to be rolled back instead of committed.
+
+    Note that *any* exception raised by a transaction function will cause a
+    rollback. Hence, this exception type is purely for convenience.
+    """
+
+
+class BadQueryError(Error):
+    """Raised by Query when a query or query string is invalid."""
+
+
+class BadFilterError(Error):
+    """Indicates a filter value is invalid.
+
+    Raised by ``Query.__setitem__()`` and ``Query.Run()`` when a filter string
+    is invalid.
+    """
+
+    def __init__(self, filter):
+        self.filter = filter
+        message = "invalid filter: {}.".format(self.filter).encode("utf-8")
+        super(BadFilterError, self).__init__(message)
+
+
+class NoLongerImplementedError(NotImplementedError):
+    """Indicates a legacy function that is intentionally left unimplemented.
+
+    In the vast majority of cases, this should only be raised by classes,
+    functions, or methods that were only used internally in legacy NDB and
+    are no longer necessary because of refactoring. Legacy NDB did a poor job
+    of distinguishing between internal and public API. Where we have determined
+    that something is probably not a part of the public API, we've removed it
+    in order to keep the supported API as clean as possible. It's possible that
+    in some cases we've guessed wrong. Get in touch with the NDB development
+    team if you think this is the case.
+    """
+
+    def __init__(self):
+        super(NoLongerImplementedError, self).__init__("No longer implemented")
+
+
+class Cancelled(Error):
+    """An operation has been cancelled by user request.
+
+    Raised when trying to get a result from a future that has been cancelled by
+    a call to ``Future.cancel`` (possibly on a future that depends on this
+    future).
+    """
+
+
+class NestedRetryException(Error):
+    """A nested retry block raised an exception.
+
+    Raised when a nested retry block cannot complete due to an exception. This
+    allows the outer retry to get back control and retry the whole operation.
+    """
diff --git a/packages/google-cloud-ndb/google/cloud/ndb/global_cache.py b/packages/google-cloud-ndb/google/cloud/ndb/global_cache.py
new file mode 100644
index 000000000000..4e3c6b7c6d3f
--- /dev/null
+++ b/packages/google-cloud-ndb/google/cloud/ndb/global_cache.py
@@ -0,0 +1,688 @@
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""GlobalCache interface and its implementations."""
+
+import abc
+import base64
+import hashlib
+import os
+import pymemcache.exceptions
+import redis.exceptions
+import threading
+import time
+import warnings
+
+import pymemcache
+import redis as redis_module
+
+# Python 2.7 doesn't have ConnectionError. In Python 3, ConnectionError is subclass of
+# OSError, which Python 2.7 does have.
+ConnectionError = getattr(__builtins__, "ConnectionError", OSError)
+
+
+class GlobalCache(object):
+    """Abstract base class for a global entity cache.
+
+    A global entity cache is shared across contexts, sessions, and possibly
+    even servers. A concrete implementation is available which uses Redis.
+
+    Essentially, this class models a simple key/value store where keys and
+    values are arbitrary ``bytes`` instances. "Compare and swap", aka
+    "optimistic transactions", should also be supported.
+
+    Concrete implementations can be either synchronous or asynchronous.
+    Asynchronous implementations should return
+    :class:`~google.cloud.ndb.tasklets.Future` instances whose eventual results
+    match the return value described for each method. Because coordinating with
+    the single threaded event model used by ``NDB`` can be tricky with remote
+    services, it's not recommended that casual users write asynchronous
+    implementations, as some specialized knowledge is required.
+
+    Attributes:
+        strict_read (bool): If :data:`False`, transient errors that occur as part of
+            an entity lookup operation will be logged as warnings but not raised to
+            the application layer. If :data:`True`, in the event of transient errors,
+            cache operations will be retried a number of times before eventually
+            raising the transient error to the application layer, if it does not
+            resolve after retrying. Setting this to :data:`True` will cause NDB
+            operations to take longer to complete if there are transient errors in
+            the cache layer.
+        strict_write (bool): If :data:`False`, transient errors that occur as part of
+            a put or delete operation will be logged as warnings, but not raised to
+            the application layer. If :data:`True`, in the event of transient errors,
+            cache operations will be retried a number of times before eventually
+            raising the transient error to the application layer if it does not
+            resolve after retrying. Setting this to :data:`False` somewhat increases
+            the risk that other clients might read stale data from the cache. Setting
+            this to :data:`True` will cause NDB operations to take longer to complete
+            if there are transient errors in the cache layer.
+    """
+
+    __metaclass__ = abc.ABCMeta
+
+    transient_errors = ()
+    """Exceptions that should be treated as transient errors in non-strict modes.
+
+    Instances of these exceptions, if raised, may be logged as warnings rather than
+    raised to the application layer, depending on the values of the ``strict_read``
+    and ``strict_write`` attributes of the instance.
+
+    This should be overridden by subclasses.
+    """
+
+    strict_read = True
+    strict_write = True
+
+    @abc.abstractmethod
+    def get(self, keys):
+        """Retrieve entities from the cache.
+
+        Arguments:
+            keys (List[bytes]): The keys to get.
+
+        Returns:
+            List[Union[bytes, None]]: Serialized entities, or :data:`None`,
+                for each key.
+        """
+        raise NotImplementedError

+    @abc.abstractmethod
+    def set(self, items, expires=None):
+        """Store entities in the cache.
+
+        Arguments:
+            items (Dict[bytes, Union[bytes, None]]): Mapping of keys to
+                serialized entities.
+            expires (Optional[float]): Number of seconds until value expires.
+
+        Returns:
+            Optional[Dict[bytes, Any]]: May return :data:`None`, or a `dict` mapping
+                keys to arbitrary results. If the result for a key is an instance of
+                `Exception`, the result will be raised as an exception in that key's
+                future.
+        """
+        raise NotImplementedError
+
+    @abc.abstractmethod
+    def set_if_not_exists(self, items, expires=None):
+        """Store entities in the cache if and only if keys are not already set.
+
+        Arguments:
+            items (Dict[bytes, Union[bytes, None]]): Mapping of keys to
+                serialized entities.
+            expires (Optional[float]): Number of seconds until value expires.
+
+        Returns:
+            Dict[bytes, bool]: A `dict` mapping each key to a boolean that will be
+                :data:`True` if that key was set with a new value, and :data:`False`
+                otherwise.
+        """
+        raise NotImplementedError
+
+    @abc.abstractmethod
+    def delete(self, keys):
+        """Remove entities from the cache.
+
+        Arguments:
+            keys (List[bytes]): The keys to remove.
+        """
+        raise NotImplementedError
+
+    @abc.abstractmethod
+    def watch(self, items):
+        """Begin an optimistic transaction for the given items.
+
+        A future call to :meth:`compare_and_swap` will only set values for keys
+        whose values haven't changed since the call to this method. Values are
+        used to check that the watched value matches the expected value for a
+        given key.
+
+        Arguments:
+            items (Dict[bytes, bytes]): The items to watch.
+        """
+        raise NotImplementedError
+
+    @abc.abstractmethod
+    def unwatch(self, keys):
+        """End an optimistic transaction for the given keys.
+
+        Indicates that the value for the key wasn't found in the database, so
+        there will not be a future call to :meth:`compare_and_swap`, and we no
+        longer need to watch this key.
+
+        Arguments:
+            keys (List[bytes]): The keys to stop watching.
+        """
+        raise NotImplementedError
+
+    @abc.abstractmethod
+    def compare_and_swap(self, items, expires=None):
+        """Like :meth:`set` but using an optimistic transaction.
+
+        Only keys whose values haven't changed since a preceding call to
+        :meth:`watch` will be changed.
+
+        Arguments:
+            items (Dict[bytes, Union[bytes, None]]): Mapping of keys to
+                serialized entities.
+            expires (Optional[float]): Number of seconds until value expires.
+
+        Returns:
+            Dict[bytes, bool]: A mapping of key to result.
A key will have a result of + :data:`True` if it was changed successfully. + """ + raise NotImplementedError + + @abc.abstractmethod + def clear(self): + """Clear all keys from global cache. + + Will be called if there previously was a connection error, to prevent clients + from reading potentially stale data from the cache. + """ + raise NotImplementedError + + +class _InProcessGlobalCache(GlobalCache): + """Reference implementation of :class:`GlobalCache`. + + Not intended for production use. Uses a single process wide dictionary to + keep an in memory cache. For use in testing and to have an easily grokkable + reference implementation. Thread safety is potentially a little sketchy. + """ + + cache = {} + """Dict: The cache. + + Relies on atomicity of ``__setitem__`` for thread safety. See: + http://effbot.org/pyfaq/what-kinds-of-global-value-mutation-are-thread-safe.htm + """ + + def __init__(self): + self._watch_keys = {} + + def get(self, keys): + """Implements :meth:`GlobalCache.get`.""" + now = time.time() + results = [self.cache.get(key) for key in keys] + entity_pbs = [] + for result in results: + if result is not None: + entity_pb, expires = result + if expires and expires < now: + entity_pb = None + else: + entity_pb = None + + entity_pbs.append(entity_pb) + + return entity_pbs + + def set(self, items, expires=None): + """Implements :meth:`GlobalCache.set`.""" + if expires: + expires = time.time() + expires + + for key, value in items.items(): + self.cache[key] = (value, expires) # Supposedly threadsafe + + def set_if_not_exists(self, items, expires=None): + """Implements :meth:`GlobalCache.set_if_not_exists`.""" + if expires: + expires = time.time() + expires + + results = {} + for key, value in items.items(): + set_value = (value, expires) + results[key] = self.cache.setdefault(key, set_value) is set_value + + return results + + def delete(self, keys): + """Implements :meth:`GlobalCache.delete`.""" + for key in keys: + self.cache.pop(key, None) # Threadsafe? + + def watch(self, items): + """Implements :meth:`GlobalCache.watch`.""" + for key, value in items.items(): + self._watch_keys[key] = value + + def unwatch(self, keys): + """Implements :meth:`GlobalCache.unwatch`.""" + for key in keys: + self._watch_keys.pop(key, None) + + def compare_and_swap(self, items, expires=None): + """Implements :meth:`GlobalCache.compare_and_swap`.""" + if expires: + expires = time.time() + expires + + results = {key: False for key in items.keys()} + for key, new_value in items.items(): + watch_value = self._watch_keys.get(key) + current_value = self.cache.get(key) + current_value = current_value[0] if current_value else current_value + if watch_value == current_value: + self.cache[key] = (new_value, expires) + results[key] = True + + return results + + def clear(self): + """Implements :meth:`GlobalCache.clear`.""" + self.cache.clear() + + +class RedisCache(GlobalCache): + """Redis implementation of the :class:`GlobalCache`. + + This is a synchronous implementation. The idea is that calls to Redis + should be fast enough not to warrant the added complexity of an + asynchronous implementation. + + Args: + redis (redis.Redis): Instance of Redis client to use. + strict_read (bool): If :data:`False`, connection errors during read operations + will be logged with a warning and treated as cache misses, but will not + raise an exception in the application, with connection errors during reads + being treated as cache misses. 
If :data:`True`, in the event of connection
+            errors, cache operations will be retried a number of times before
+            eventually raising the connection error to the application layer, if it
+            does not resolve after retrying. Setting this to :data:`True` will cause
+            NDB operations to take longer to complete if there are transient errors
+            in the cache layer. Default: :data:`False`.
+        strict_write (bool): If :data:`False`, connection errors during write
+            operations will be logged with a warning, but will not raise an exception
+            in the application. If :data:`True`, connection errors during write will
+            be raised as exceptions in the application. Because write operations
+            involve cache invalidation, setting this to :data:`False` may allow other
+            clients to retrieve stale data from the cache. If :data:`True`, in the
+            event of connection errors, cache operations will be retried a number of
+            times before eventually raising the connection error to the application
+            layer, if it does not resolve after retrying. Setting this to
+            :data:`True` will cause NDB operations to take longer to complete if
+            there are transient errors in the cache layer. Default: :data:`True`.
+    """
+
+    transient_errors = (
+        IOError,
+        ConnectionError,
+        redis.exceptions.ConnectionError,
+        redis.exceptions.TimeoutError,
+    )
+
+    @classmethod
+    def from_environment(cls, strict_read=False, strict_write=True):
+        """Generate a :class:`RedisCache` from an environment variable.
+
+        This class method looks for the ``REDIS_CACHE_URL`` environment
+        variable and, if it is set, passes its value to ``Redis.from_url`` to
+        construct a ``Redis`` instance which is then used to instantiate a
+        ``RedisCache`` instance.
+
+        Args:
+            strict_read (bool): If :data:`False`, connection errors during read
+                operations will be logged with a warning and treated as cache misses,
+                but will not raise an exception in the application. If :data:`True`, in
+                the event of connection errors, cache operations will be retried a
+                number of times before eventually raising the connection error to the
+                application layer, if it does not resolve after retrying. Setting this
+                to :data:`True` will cause NDB operations to take longer to complete if
+                there are transient errors in the cache layer. Default: :data:`False`.
+            strict_write (bool): If :data:`False`, connection errors during write
+                operations will be logged with a warning, but will not raise an
+                exception in the application. If :data:`True`, connection errors during
+                write will be raised as exceptions in the application. Because write
+                operations involve cache invalidation, setting this to :data:`False` may
+                allow other clients to retrieve stale data from the cache. If
+                :data:`True`, in the event of connection errors, cache operations will
+                be retried a number of times before eventually raising the connection
+                error to the application layer, if it does not resolve after retrying.
+                Setting this to :data:`True` will cause NDB operations to take longer to
+                complete if there are transient errors in the cache layer. Default:
+                :data:`True`.
+
+        Returns:
+            Optional[RedisCache]: A :class:`RedisCache` instance or
+                :data:`None`, if ``REDIS_CACHE_URL`` is not set in the
+                environment.
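+
+        For example, with ``REDIS_CACHE_URL`` set to an address such as
+        ``redis://localhost:6379`` (illustrative), the resulting cache can be
+        passed to :meth:`~google.cloud.ndb.client.Client.context`:
+
+        .. code-block:: python
+
+            global_cache = RedisCache.from_environment()
+            if global_cache is not None:
+                with client.context(global_cache=global_cache):
+                    ...  # NDB calls here use Redis as the global cache.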
+        """
+        url = os.environ.get("REDIS_CACHE_URL")
+        if url:
+            return cls(redis_module.Redis.from_url(url))
+
+    def __init__(self, redis, strict_read=False, strict_write=True):
+        self.redis = redis
+        self.strict_read = strict_read
+        self.strict_write = strict_write
+        self._pipes = threading.local()
+
+    @property
+    def pipes(self):
+        local = self._pipes
+        if not hasattr(local, "pipes"):
+            local.pipes = {}
+        return local.pipes
+
+    def get(self, keys):
+        """Implements :meth:`GlobalCache.get`."""
+        res = self.redis.mget(keys)
+        return res
+
+    def set(self, items, expires=None):
+        """Implements :meth:`GlobalCache.set`."""
+        self.redis.mset(items)
+        if expires:
+            for key in items.keys():
+                self.redis.expire(key, expires)
+
+    def set_if_not_exists(self, items, expires=None):
+        """Implements :meth:`GlobalCache.set_if_not_exists`."""
+        results = {}
+        for key, value in items.items():
+            results[key] = key_was_set = self.redis.setnx(key, value)
+            if key_was_set and expires:
+                self.redis.expire(key, expires)
+
+        return results
+
+    def delete(self, keys):
+        """Implements :meth:`GlobalCache.delete`."""
+        self.redis.delete(*keys)
+
+    def watch(self, items):
+        """Implements :meth:`GlobalCache.watch`."""
+        for key, value in items.items():
+            pipe = self.redis.pipeline()
+            pipe.watch(key)
+            if pipe.get(key) == value:
+                self.pipes[key] = pipe
+            else:
+                pipe.reset()
+
+    def unwatch(self, keys):
+        """Implements :meth:`GlobalCache.unwatch`."""
+        for key in keys:
+            pipe = self.pipes.pop(key, None)
+            if pipe:
+                pipe.reset()
+
+    def compare_and_swap(self, items, expires=None):
+        """Implements :meth:`GlobalCache.compare_and_swap`."""
+        results = {key: False for key in items.keys()}
+
+        pipes = self.pipes
+        for key, value in items.items():
+            pipe = pipes.pop(key, None)
+            if pipe is None:
+                continue
+
+            try:
+                pipe.multi()
+                if expires:
+                    pipe.setex(key, expires, value)
+                else:
+                    pipe.set(key, value)
+                pipe.execute()
+                results[key] = True
+
+            except redis_module.exceptions.WatchError:
+                pass
+
+            finally:
+                pipe.reset()
+
+        return results
+
+    def clear(self):
+        """Implements :meth:`GlobalCache.clear`."""
+        self.redis.flushdb()
+
+
+class MemcacheCache(GlobalCache):
+    """Memcache implementation of the :class:`GlobalCache`.
+
+    This is a synchronous implementation. The idea is that calls to Memcache
+    should be fast enough not to warrant the added complexity of an
+    asynchronous implementation.
+
+    Args:
+        client (pymemcache.Client): Instance of Memcache client to use.
+        strict_read (bool): If :data:`False`, connection errors during read
+            operations will be logged with a warning and treated as cache misses,
+            but will not raise an exception in the application. If :data:`True`, in
+            the event of connection errors, cache operations will be retried a
+            number of times before eventually raising the connection error to the
+            application layer, if it does not resolve after retrying. Setting this
+            to :data:`True` will cause NDB operations to take longer to complete if
+            there are transient errors in the cache layer. Default: :data:`False`.
+        strict_write (bool): If :data:`False`, connection errors during write
+            operations will be logged with a warning, but will not raise an
+            exception in the application. If :data:`True`, connection errors during
+            write will be raised as exceptions in the application. Because write
+            operations involve cache invalidation, setting this to :data:`False` may
+            allow other clients to retrieve stale data from the cache.
If :data:`True`, + in the event of connection errors, cache operations will be retried a number + of times before eventually raising the connection error to the application + layer, if it does not resolve after retrying. Setting this to :data:`True` + will cause NDB operations to take longer to complete if there are transient + errors in the cache layer. Default: :data:`True`. + """ + + class KeyNotSet(Exception): + def __init__(self, key): + self.key = key + super(MemcacheCache.KeyNotSet, self).__init__( + "SET operation failed in memcache for key: {}".format(key) + ) + + def __eq__(self, other): + if isinstance(other, type(self)): + return self.key == other.key + return NotImplemented + + transient_errors = ( + IOError, + ConnectionError, + KeyNotSet, + pymemcache.exceptions.MemcacheServerError, + pymemcache.exceptions.MemcacheUnexpectedCloseError, + ) + + @staticmethod + def _parse_host_string(host_string): + split = host_string.split(":") + if len(split) == 1: + return split[0], 11211 + + elif len(split) == 2: + host, port = split + try: + port = int(port) + return host, port + except ValueError: + pass + + raise ValueError("Invalid memcached host_string: {}".format(host_string)) + + @staticmethod + def _key(key): + encoded = base64.b64encode(key) + if len(encoded) > 250: + encoded = hashlib.sha1(encoded).hexdigest() + return encoded + + @classmethod + def from_environment(cls, max_pool_size=4, strict_read=False, strict_write=True): + """Generate a ``pymemcache.Client`` from an environment variable. + + This class method looks for the ``MEMCACHED_HOSTS`` environment + variable and, if it is set, parses the value as a space delimited list of + hostnames, optionally with ports. For example: + + "localhost" + "localhost:11211" + "1.1.1.1:11211 2.2.2.2:11211 3.3.3.3:11211" + + Args: + max_pool_size (int): Size of connection pool to be used by client. If set to + ``0`` or ``1``, connection pooling will not be used. Default: ``4`` + strict_read (bool): If :data:`False`, connection errors during read + operations will be logged with a warning and treated as cache misses, + but will not raise an exception in the application, with connection + errors during reads being treated as cache misses. If :data:`True`, in + the event of connection errors, cache operations will be retried a + number of times before eventually raising the connection error to the + application layer, if it does not resolve after retrying. Setting this + to :data:`True` will cause NDB operations to take longer to complete if + there are transient errors in the cache layer. Default: :data:`False`. + strict_write (bool): If :data:`False`, connection errors during write + operations will be logged with a warning, but will not raise an + exception in the application. If :data:`True`, connection errors during + write will be raised as exceptions in the application. Because write + operations involve cache invalidation, setting this to :data:`False` may + allow other clients to retrieve stale data from the cache. If + :data:`True`, in the event of connection errors, cache operations will + be retried a number of times before eventually raising the connection + error to the application layer, if it does not resolve after retrying. + Setting this to :data:`True` will cause NDB operations to take longer to + complete if there are transient errors in the cache layer. Default: + :data:`True`. 
+ + Returns: + Optional[MemcacheCache]: A :class:`MemcacheCache` instance or + :data:`None`, if ``MEMCACHED_HOSTS`` is not set in the + environment. + """ + hosts_string = os.environ.get("MEMCACHED_HOSTS") + if not hosts_string: + return None + + hosts = [ + cls._parse_host_string(host_string.strip()) + for host_string in hosts_string.split() + ] + + if not max_pool_size: + max_pool_size = 1 + + if len(hosts) == 1: + client = pymemcache.PooledClient(hosts[0], max_pool_size=max_pool_size) + + else: + client = pymemcache.HashClient( + hosts, use_pooling=True, max_pool_size=max_pool_size + ) + + return cls(client, strict_read=strict_read, strict_write=strict_write) + + def __init__(self, client, strict_read=False, strict_write=True): + self.client = client + self.strict_read = strict_read + self.strict_write = strict_write + self._cas = threading.local() + + @property + def caskeys(self): + local = self._cas + if not hasattr(local, "caskeys"): + local.caskeys = {} + return local.caskeys + + def get(self, keys): + """Implements :meth:`GlobalCache.get`.""" + keys = [self._key(key) for key in keys] + result = self.client.get_many(keys) + return [result.get(key) for key in keys] + + def set(self, items, expires=None): + """Implements :meth:`GlobalCache.set`.""" + expires = expires if expires else 0 + orig_items = items + items = {} + orig_keys = {} + for orig_key, value in orig_items.items(): + key = self._key(orig_key) + orig_keys[key] = orig_key + items[key] = value + + unset_keys = self.client.set_many(items, expire=expires, noreply=False) + if unset_keys: + unset_keys = [orig_keys[key] for key in unset_keys] + warnings.warn( + "Keys failed to set in memcache: {}".format(unset_keys), + RuntimeWarning, + ) + return {key: MemcacheCache.KeyNotSet(key) for key in unset_keys} + + def set_if_not_exists(self, items, expires=None): + """Implements :meth:`GlobalCache.set_if_not_exists`.""" + expires = expires if expires else 0 + results = {} + for key, value in items.items(): + results[key] = self.client.add( + self._key(key), value, expire=expires, noreply=False + ) + + return results + + def delete(self, keys): + """Implements :meth:`GlobalCache.delete`.""" + keys = [self._key(key) for key in keys] + self.client.delete_many(keys) + + def watch(self, items): + """Implements :meth:`GlobalCache.watch`.""" + caskeys = self.caskeys + keys = [] + prev_values = {} + for key, prev_value in items.items(): + key = self._key(key) + keys.append(key) + prev_values[key] = prev_value + + for key, (value, caskey) in self.client.gets_many(keys).items(): + if prev_values[key] == value: + caskeys[key] = caskey + + def unwatch(self, keys): + """Implements :meth:`GlobalCache.unwatch`.""" + keys = [self._key(key) for key in keys] + caskeys = self.caskeys + for key in keys: + caskeys.pop(key, None) + + def compare_and_swap(self, items, expires=None): + """Implements :meth:`GlobalCache.compare_and_swap`.""" + caskeys = self.caskeys + results = {} + for orig_key, value in items.items(): + key = self._key(orig_key) + caskey = caskeys.pop(key, None) + if caskey is None: + continue + + expires = expires if expires else 0 + results[orig_key] = bool( + self.client.cas(key, value, caskey, expire=expires, noreply=False) + ) + + return results + + def clear(self): + """Implements :meth:`GlobalCache.clear`.""" + self.client.flush_all() diff --git a/packages/google-cloud-ndb/google/cloud/ndb/key.py b/packages/google-cloud-ndb/google/cloud/ndb/key.py new file mode 100644 index 000000000000..b168e55a190e --- /dev/null +++ 
b/packages/google-cloud-ndb/google/cloud/ndb/key.py @@ -0,0 +1,1613 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Provides a :class:`.Key` for Google Cloud Datastore. + +.. testsetup:: * + + from google.cloud import ndb + +A key encapsulates the following pieces of information, which together +uniquely designate a (possible) entity in Google Cloud Datastore: + +* a Google Cloud Platform project (a string) +* a list of one or more ``(kind, id)`` pairs where ``kind`` is a string + and ``id`` is either a string or an integer +* an optional database (a string) +* an optional namespace (a string) + +The application ID must always be part of the key, but since most +applications can only access their own entities, it defaults to the +current application ID and you rarely need to worry about it. + +The database is an optional database ID. If unspecified, it defaults +to that of the client. +For usage in Cloud NDB, the default database should always be referred +to as an empty string; please do not use "(default)". + +The namespace designates a top-level partition of the key space for a +particular application. If you've never heard of namespaces, you can +safely ignore this feature. + +Most of the action is in the ``(kind, id)`` pairs. A key must have at +least one ``(kind, id)`` pair. The last ``(kind, id)`` pair gives the kind +and the ID of the entity that the key refers to, the others merely +specify a "parent key". + +The kind is a string giving the name of the model class used to +represent the entity. In more traditional databases this would be +the table name. A model class is a Python class derived from +:class:`.Model`. Only the class name itself is used as the kind. This means +all your model classes must be uniquely named within one application. You can +override this on a per-class basis. + +The ID is either a string or an integer. When the ID is a string, the +application is in control of how it assigns IDs. For example, you +could use an email address as the ID for Account entities. + +To use integer IDs, it's common to let the datastore choose a unique ID for +an entity when first inserted into the datastore. The ID can be set to +:data:`None` to represent the key for an entity that hasn't yet been +inserted into the datastore. The completed key (including the assigned ID) +will be returned after the entity is successfully inserted into the datastore. + +A key for which the ID of the last ``(kind, id)`` pair is set to :data:`None` +is called an **incomplete key** or **partial key**. Such keys can only be used +to insert entities into the datastore. + +A key with exactly one ``(kind, id)`` pair is called a top level key or a +root key. Top level keys are also used as entity groups, which play a +role in transaction management. + +If there is more than one ``(kind, id)`` pair, all but the last pair +represent the "ancestor path", also known as the key of the "parent entity". 
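+
+For example, the key ``ndb.Key("Parent", "C", "Child", 42)`` refers to a
+``Child`` entity with ID ``42`` whose parent key is ``ndb.Key("Parent", "C")``;
+here ``("Parent", "C")`` is the ancestor path.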
+ +Other constraints: + +* Kinds and string IDs must not be empty and must be at most 1500 bytes + long (after UTF-8 encoding) +* Integer IDs must be at least ``1`` and at most ``2**63 - 1`` (i.e. the + positive part of the range for a 64-bit signed integer) + +In the "legacy" Google App Engine runtime, the default namespace could be +set via the namespace manager (``google.appengine.api.namespace_manager``). +On the gVisor Google App Engine runtime (e.g. Python 3.7), the namespace +manager is not available so the default is to have an unset or empty +namespace. To explicitly select the empty namespace pass ``namespace=""``. +""" + + +import base64 +import functools + +from google.cloud.datastore import _app_engine_key_pb2 +from google.cloud.datastore import key as _key_module +import google.cloud.datastore + +from google.cloud.ndb import exceptions +from google.cloud.ndb import _options +from google.cloud.ndb import tasklets +from google.cloud.ndb import utils + +__all__ = ["Key", "UNDEFINED"] +_APP_ID_ENVIRONMENT = "APPLICATION_ID" +_APP_ID_DEFAULT = "_" +_WRONG_TYPE = "Cannot construct Key reference on non-Key class; received {!r}" +_REFERENCE_APP_MISMATCH = ( + "Key reference constructed uses a different app {!r} than the one specified {!r}" +) +_REFERENCE_DATABASE_MISMATCH = "Key reference constructed uses a different database {!r} than the one specified {!r}" +_REFERENCE_NAMESPACE_MISMATCH = ( + "Key reference constructed uses a different namespace {!r} than " + "the one specified {!r}" +) +_INVALID_ID_TYPE = "Key ID must be a string or a number; received {!r}" +_NO_LEGACY = "The `google.appengine.ext.db` module is not available." +_MAX_INTEGER_ID = 0x7FFFFFFFFFFFFFFF # 2 ** 63 - 1 +_MAX_KEYPART_BYTES = 1500 +_BAD_KIND = "Key kind string must be a non-empty string up to {:d} bytes; received {}" +_BAD_INTEGER_ID = "Key ID number is outside of range [1, 2^63 - 1]; received {:d}" +_BAD_STRING_ID = ( + "Key name strings must be non-empty strings up to {:d} bytes; received {}" +) + +UNDEFINED = object() +"""Sentinel value. + +Used to indicate a database or namespace hasn't been explicitly set in key construction. +Used to distinguish between not passing a value and passing `None`, which +indicates the default database/namespace. +""" + + +class Key(object): + """An immutable datastore key. + + For flexibility and convenience, multiple constructor signatures are + supported. + + The primary way to construct a key is using positional arguments: + + .. testsetup:: * + + from unittest import mock + from google.cloud.ndb import context as context_module + client = mock.Mock( + project="testing", + database=None, + namespace=None, + stub=mock.Mock(spec=()), + spec=("project", "database", "namespace", "stub"), + ) + context = context_module.Context(client).use() + context.__enter__() + kind1, id1 = "Parent", "C" + kind2, id2 = "Child", 42 + + .. testcleanup:: * + + context.__exit__(None, None, None) + + .. doctest:: key-constructor-primary + + >>> ndb.Key(kind1, id1, kind2, id2) + Key('Parent', 'C', 'Child', 42) + + This is shorthand for either of the following two longer forms: + + .. doctest:: key-constructor-flat-or-pairs + + >>> ndb.Key(pairs=[(kind1, id1), (kind2, id2)]) + Key('Parent', 'C', 'Child', 42) + >>> ndb.Key(flat=[kind1, id1, kind2, id2]) + Key('Parent', 'C', 'Child', 42) + + Either of the above constructor forms can additionally pass in another + key via the ``parent`` keyword. 
The ``(kind, id)`` pairs of the parent key + are inserted before the ``(kind, id)`` pairs passed explicitly. + + .. doctest:: key-constructor-parent + + >>> parent = ndb.Key(kind1, id1) + >>> parent + Key('Parent', 'C') + >>> ndb.Key(kind2, id2, parent=parent) + Key('Parent', 'C', 'Child', 42) + + You can also construct a Key from a "urlsafe" encoded string: + + .. doctest:: key-constructor-urlsafe + + >>> ndb.Key(urlsafe=b"agdleGFtcGxlcgsLEgRLaW5kGLkKDA") + Key('Kind', 1337, project='example') + + For rare use cases the following constructors exist: + + .. testsetup:: key-constructor-rare + + from google.cloud.datastore import _app_engine_key_pb2 + reference = _app_engine_key_pb2.Reference( + app="example", + path=_app_engine_key_pb2.Path(element=[ + _app_engine_key_pb2.Path.Element(type="Kind", id=1337), + ]), + ) + + .. doctest:: key-constructor-rare + + >>> # Passing in a low-level Reference object + >>> reference + app: "example" + path { + element { + type: "Kind" + id: 1337 + } + } + + >>> ndb.Key(reference=reference) + Key('Kind', 1337, project='example') + >>> # Passing in a serialized low-level Reference + >>> serialized = reference.SerializeToString() + >>> serialized + b'j\\x07exampler\\x0b\\x0b\\x12\\x04Kind\\x18\\xb9\\n\\x0c' + >>> ndb.Key(serialized=serialized) + Key('Kind', 1337, project='example') + >>> # For unpickling, the same as ndb.Key(**kwargs) + >>> kwargs = {"pairs": [("Cheese", "Cheddar")], "namespace": "good"} + >>> ndb.Key(kwargs) + Key('Cheese', 'Cheddar', namespace='good') + + The "urlsafe" string is really a websafe-base64-encoded serialized + ``Reference``, but it's best to think of it as just an opaque unique + string. + + If a ``Reference`` is passed (using one of the ``reference``, + ``serialized`` or ``urlsafe`` keywords), the positional arguments and + ``namespace`` must match what is already present in the ``Reference`` + (after decoding if necessary). The parent keyword cannot be combined with + a ``Reference`` in any form. + + Keys are immutable, which means that a Key object cannot be modified + once it has been created. This is enforced by the implementation as + well as Python allows. + + Keys also support interaction with the datastore; the methods :meth:`get`, + :meth:`get_async`, :meth:`delete` and :meth:`delete_async` are + the only ones that engage in any kind of I/O activity. + + Keys may be pickled. + + Subclassing Key is best avoided; it would be hard to get right. + + Args: + path_args (Union[Tuple[str, ...], Tuple[Dict]]): Either a tuple of + ``(kind, id)`` pairs or a single dictionary containing only keyword + arguments. + reference (Optional[\ + ~google.cloud.datastore._app_engine_key_pb2.Reference]): A + reference protobuf representing a key. + serialized (Optional[bytes]): A reference protobuf serialized to bytes. + urlsafe (Optional[bytes]): A reference protobuf serialized to bytes. The + raw bytes are then converted to a websafe base64-encoded string. + pairs (Optional[Iterable[Tuple[str, Union[str, int]]]]): An iterable + of ``(kind, id)`` pairs. If this argument is used, then + ``path_args`` should be empty. + flat (Optional[Iterable[Union[str, int]]]): An iterable of the + ``(kind, id)`` pairs but flattened into a single value. For + example, the pairs ``[("Parent", 1), ("Child", "a")]`` would be + flattened to ``["Parent", 1, "Child", "a"]``. + project (Optional[str]): The Google Cloud Platform project (previously + on Google App Engine, this was called the Application ID). 
+ app (Optional[str]): DEPRECATED: Synonym for ``project``. + namespace (Optional[str]): The namespace for the key. + parent (Optional[Key]): The parent of the key being + constructed. If provided, the key path will be **relative** to the + parent key's path. + database (Optional[str]): The database to use. + Defaults to the parent key's database if a parent was specified, and + to the client's database if it was not. + + Raises: + TypeError: If none of ``reference``, ``serialized``, ``urlsafe``, + ``pairs`` or ``flat`` is provided as an argument and no positional + arguments were given with the path. + """ + + _hash_value = None + + def __new__(cls, *path_args, **kwargs): + _constructor_handle_positional(path_args, kwargs) + instance = super(Key, cls).__new__(cls) + + if "reference" in kwargs or "serialized" in kwargs or "urlsafe" in kwargs: + ds_key, reference = _parse_from_ref(cls, **kwargs) + elif "pairs" in kwargs or "flat" in kwargs: + ds_key = _parse_from_args(**kwargs) + reference = None + else: + raise TypeError("Key() cannot create a Key instance without arguments.") + + instance._key = ds_key + instance._reference = reference + return instance + + @classmethod + def _from_ds_key(cls, ds_key): + """Factory constructor for a :class:`~google.cloud.datastore.key.Key`. + + This bypasses the actual constructor and directly sets the ``_key`` + attribute to ``ds_key``. + + Args: + ds_key (~google.cloud.datastore.key.Key): A key from + ``google-cloud-datastore``. + + Returns: + Key: The constructed :class:`Key`. + """ + key = super(Key, cls).__new__(cls) + key._key = ds_key + key._reference = None + return key + + def __repr__(self): + """String representation used by :func:`str` and :func:`repr`. + + We produce a short string that conveys all relevant information, + suppressing project, database, and namespace when they are equal to their + respective defaults. + + In many cases, this string can be used to invoke the constructor. + + For example: + + .. doctest:: key-repr + + >>> key = ndb.Key("hi", 100) + >>> repr(key) + "Key('hi', 100)" + >>> + >>> key = ndb.Key( + ... "bye", "hundred", project="specific", database="db", namespace="space", + ... ) + >>> str(key) + "Key('bye', 'hundred', project='specific', database='db', namespace='space')" + """ + args = ["{!r}".format(item) for item in self.flat()] + if self.project() != _project_from_app(None): + args.append("project={!r}".format(self.app())) + if self.database(): + args.append("database={!r}".format(self.database())) + if self.namespace() is not None: + args.append("namespace={!r}".format(self.namespace())) + + return "Key({})".format(", ".join(args)) + + def __str__(self): + """Alias for :meth:`__repr__`.""" + return self.__repr__() + + def __hash__(self): + """Hash value, for use in dictionary lookups. + + .. note:: + + This ignores ``app``, ``database``, and ``namespace``. Since :func:`hash` isn't + expected to return a unique value (it just reduces the chance of + collision), this doesn't try to increase entropy by including other + values. The primary concern is that hashes of equal keys are + equal, not the other way around.
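+
+        For example (an illustrative sketch), two keys that differ only in
+        namespace hash identically but still compare unequal::
+
+            key1 = ndb.Key("Kind", 1)
+            key2 = ndb.Key("Kind", 1, namespace="other")
+            hash(key1) == hash(key2)  # True -- only the pairs are hashed
+            key1 == key2              # False -- the namespaces differ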
+ """ + hash_value = self._hash_value + if hash_value is None: + self._hash_value = hash_value = hash(self.pairs()) + return hash_value + + def _tuple(self): + """Helper to return an orderable tuple.""" + return (self.app(), self.namespace(), self.database() or "", self.pairs()) + + def __eq__(self, other): + """Equality comparison operation.""" + if not isinstance(other, Key): + return NotImplemented + + return self._tuple() == other._tuple() + + def __ne__(self, other): + """The opposite of __eq__.""" + if not isinstance(other, Key): + return NotImplemented + return not self.__eq__(other) + + def __lt__(self, other): + """Less than ordering.""" + if not isinstance(other, Key): + raise TypeError + return self._tuple() < other._tuple() + + def __le__(self, other): + """Less than or equal ordering.""" + if not isinstance(other, Key): + raise TypeError + return self._tuple() <= other._tuple() + + def __gt__(self, other): + """Greater than ordering.""" + if not isinstance(other, Key): + raise TypeError + return not self <= other + + def __ge__(self, other): + """Greater than or equal ordering.""" + if not isinstance(other, Key): + raise TypeError + return not self < other + + def __getstate__(self): + """Private API used for pickling. + + Returns: + Tuple[Dict[str, Any]]: A tuple containing a single dictionary of + state to pickle. The dictionary has four keys: ``pairs``, ``app``, + ``database``, and ``namespace``. + """ + to_pickle = ( + { + "pairs": self.pairs(), + "app": self.app(), + "namespace": self.namespace(), + }, + ) + if self.database(): + to_pickle[0]["database"] = self.database() + return to_pickle + + def __setstate__(self, state): + """Private API used for unpickling. + + Args: + state (Tuple[Dict[str, Any]]): A tuple containing a single + dictionary of pickled state. This should match the signature + returned from :func:`__getstate__`, in particular, it should + have four keys: ``pairs``, ``app``, ``database``, and ``namespace``. + + Raises: + TypeError: If the ``state`` does not have length 1. + TypeError: If the single element in ``state`` is not a dictionary. + """ + if len(state) != 1: + msg = "Invalid state length, expected 1; received {:d}".format(len(state)) + raise TypeError(msg) + + kwargs = state[0] + if not isinstance(kwargs, dict): + raise TypeError( + "Key accepts a dict of keyword arguments as state; " + "received {!r}".format(kwargs) + ) + + flat = _get_path(None, kwargs["pairs"]) + _clean_flat_path(flat) + project = _project_from_app(kwargs["app"]) + + database = None + if "database" in kwargs: + database = kwargs["database"] + + self._key = _key_module.Key( + *flat, + project=project, + namespace=kwargs["namespace"], + database=database, + ) + self._reference = None + + def __getnewargs__(self): + """Private API used to specify ``__new__`` arguments when unpickling. + + .. note:: + + This method is provided for backwards compatibility, though it + isn't needed. + + Returns: + Tuple[Dict[str, Any]]: A tuple containing a single dictionary of + state to pickle. The dictionary has four keys: ``pairs``, ``app``, + ``database`` and ``namespace``. + """ + return ( + { + "pairs": self.pairs(), + "app": self.app(), + "namespace": self.namespace(), + "database": self.database() if self.database() is not None else None, + }, + ) + + def parent(self): + """Parent key constructed from all but the last ``(kind, id)`` pairs. + + If there is only one ``(kind, id)`` pair, return :data:`None`. + + .. doctest:: key-parent + + >>> key = ndb.Key( + ... pairs=[ + ... 
("Purchase", "Food"), + ... ("Type", "Drink"), + ... ("Coffee", 11), + ... ] + ... ) + >>> parent = key.parent() + >>> parent + Key('Purchase', 'Food', 'Type', 'Drink') + >>> + >>> grandparent = parent.parent() + >>> grandparent + Key('Purchase', 'Food') + >>> + >>> grandparent.parent() is None + True + """ + if self._key.parent is None: + return None + return Key._from_ds_key(self._key.parent) + + def root(self): + """The root key. + + This is either the current key or the highest parent. + + .. doctest:: key-root + + >>> key = ndb.Key("a", 1, "steak", "sauce") + >>> root_key = key.root() + >>> root_key + Key('a', 1) + >>> root_key.root() is root_key + True + """ + root_key = self._key + while root_key.parent is not None: + root_key = root_key.parent + + if root_key is self._key: + return self + + return Key._from_ds_key(root_key) + + def namespace(self): + """The namespace for the key, if set. + + .. doctest:: key-namespace + + >>> key = ndb.Key("A", "B") + >>> key.namespace() is None + True + >>> + >>> key = ndb.Key("A", "B", namespace="rock") + >>> key.namespace() + 'rock' + """ + return self._key.namespace + + def project(self): + """The project ID for the key. + + .. warning:: + + This **may** differ from the original ``app`` passed in to the + constructor. This is because prefixed application IDs like + ``s~example`` are "legacy" identifiers from Google App Engine. + They have been replaced by equivalent project IDs, e.g. here it + would be ``example``. + + .. doctest:: key-app + + >>> key = ndb.Key("A", "B", project="s~example") + >>> key.project() + 'example' + >>> + >>> key = ndb.Key("A", "B", project="example") + >>> key.project() + 'example' + """ + return self._key.project + + app = project + + def database(self): + """The database ID for the key. + + .. doctest:: key-database + + >>> key = ndb.Key("A", "B", database="mydb") + >>> key.database() + 'mydb' + """ + return self._key.database + + def id(self): + """The string or integer ID in the last ``(kind, id)`` pair, if any. + + .. doctest:: key-id + + >>> key_int = ndb.Key("A", 37) + >>> key_int.id() + 37 + >>> key_str = ndb.Key("A", "B") + >>> key_str.id() + 'B' + >>> key_partial = ndb.Key("A", None) + >>> key_partial.id() is None + True + """ + return self._key.id_or_name + + def string_id(self): + """The string ID in the last ``(kind, id)`` pair, if any. + + .. doctest:: key-string-id + + >>> key_int = ndb.Key("A", 37) + >>> key_int.string_id() is None + True + >>> key_str = ndb.Key("A", "B") + >>> key_str.string_id() + 'B' + >>> key_partial = ndb.Key("A", None) + >>> key_partial.string_id() is None + True + """ + return self._key.name + + def integer_id(self): + """The integer ID in the last ``(kind, id)`` pair, if any. + + .. doctest:: key-integer-id + + >>> key_int = ndb.Key("A", 37) + >>> key_int.integer_id() + 37 + >>> key_str = ndb.Key("A", "B") + >>> key_str.integer_id() is None + True + >>> key_partial = ndb.Key("A", None) + >>> key_partial.integer_id() is None + True + """ + return self._key.id + + def pairs(self): + """The ``(kind, id)`` pairs for the key. + + .. doctest:: key-pairs + + >>> key = ndb.Key("Satellite", "Moon", "Space", "Dust") + >>> key.pairs() + (('Satellite', 'Moon'), ('Space', 'Dust')) + >>> + >>> partial_key = ndb.Key("Known", None) + >>> partial_key.pairs() + (('Known', None),) + """ + flat = self.flat() + pairs = [] + for i in range(0, len(flat), 2): + pairs.append(flat[i : i + 2]) # noqa: E203 + return tuple(pairs) + + def flat(self): + """The flat path for the key. + + .. 
doctest:: key-flat + + >>> key = ndb.Key("Satellite", "Moon", "Space", "Dust") + >>> key.flat() + ('Satellite', 'Moon', 'Space', 'Dust') + >>> + >>> partial_key = ndb.Key("Known", None) + >>> partial_key.flat() + ('Known', None) + """ + flat_path = self._key.flat_path + if len(flat_path) % 2 == 1: + flat_path += (None,) + return flat_path + + def kind(self): + """The kind of the entity referenced. + + This comes from the last ``(kind, id)`` pair. + + .. doctest:: key-kind + + >>> key = ndb.Key("Satellite", "Moon", "Space", "Dust") + >>> key.kind() + 'Space' + >>> + >>> partial_key = ndb.Key("Known", None) + >>> partial_key.kind() + 'Known' + """ + return self._key.kind + + def reference(self): + """The ``Reference`` protobuf object for this key. + + The return value will be stored on the current key, so the caller + promises not to mutate it. + + .. doctest:: key-reference + + >>> key = ndb.Key("Trampoline", 88, project="xy", database="wv", namespace="zt") + >>> key.reference() + app: "xy" + path { + element { + type: "Trampoline" + id: 88 + } + } + name_space: "zt" + database_id: "wv" + + """ + if self._reference is None: + if self._key.database: + self._reference = _app_engine_key_pb2.Reference( + app=self._key.project, + path=_to_legacy_path(self._key.path), + database_id=self._key.database, + name_space=self._key.namespace, + ) + else: + self._reference = _app_engine_key_pb2.Reference( + app=self._key.project, + path=_to_legacy_path(self._key.path), + name_space=self._key.namespace, + ) + return self._reference + + def serialized(self): + """A ``Reference`` protobuf serialized to bytes. + + .. doctest:: key-serialized + + >>> key = ndb.Key("Kind", 1337, project="example", database="example-db") + >>> key.serialized() + b'j\\x07exampler\\x0b\\x0b\\x12\\x04Kind\\x18\\xb9\\n\\x0c\\xba\\x01\\nexample-db' + """ + reference = self.reference() + return reference.SerializeToString() + + def urlsafe(self): + """A ``Reference`` protobuf serialized and encoded as urlsafe base 64. + + .. doctest:: key-urlsafe + + >>> key = ndb.Key("Kind", 1337, project="example") + >>> key.urlsafe() + b'agdleGFtcGxlcgsLEgRLaW5kGLkKDA' + """ + raw_bytes = self.serialized() + return base64.urlsafe_b64encode(raw_bytes).strip(b"=") + + def to_legacy_urlsafe(self, location_prefix): + """ + A urlsafe serialized ``Reference`` protobuf with an App Engine prefix. + + This will produce a urlsafe string which includes an App Engine + location prefix ("partition"), compatible with the Google Datastore + admin console. + + This only supports the default database. For a named database, + please use urlsafe() instead. + + Arguments: + location_prefix (str): A location prefix ("partition") to be + prepended to the key's `project` when serializing the key. A + typical value is "s~", but "e~" or other partitions are + possible depending on the project's region and other factors. + + .. 
doctest:: key-legacy-urlsafe + + >>> key = ndb.Key("Kind", 1337, project="example") + >>> key.to_legacy_urlsafe("s~") + b'aglzfmV4YW1wbGVyCwsSBEtpbmQYuQoM' + """ + if self._key.database: + raise ValueError("to_legacy_urlsafe only supports the default database") + return google.cloud.datastore.Key( + *self.flat(), + **{"namespace": self._key.namespace, "project": self._key.project}, + ).to_legacy_urlsafe(location_prefix=location_prefix) + + @_options.ReadOptions.options + @utils.positional(1) + def get( + self, + read_consistency=None, + read_policy=None, + transaction=None, + retries=None, + timeout=None, + deadline=None, + use_cache=None, + use_global_cache=None, + use_datastore=None, + global_cache_timeout=None, + use_memcache=None, + memcache_timeout=None, + max_memcache_items=None, + force_writes=None, + _options=None, + ): + """Synchronously get the entity for this key. + + Returns the retrieved :class:`.Model` or :data:`None` if there is no + such entity. + + Args: + read_consistency: Set this to ``ndb.EVENTUAL`` if, instead of + waiting for the Datastore to finish applying changes to all + returned results, you wish to get possibly-not-current results + faster. You can't do this if using a transaction. + transaction (bytes): Any results returned will be consistent with + the Datastore state represented by this transaction id. + Defaults to the currently running transaction. Cannot be used + with ``read_consistency=ndb.EVENTUAL``. + retries (int): Number of times to retry this operation in the case + of transient server errors. Operation will potentially be tried + up to ``retries`` + 1 times. Set to ``0`` to try operation only + once, with no retries. + timeout (float): Override the gRPC timeout, in seconds. + deadline (float): DEPRECATED: Synonym for ``timeout``. + use_cache (bool): Specifies whether to store entities in in-process + cache; overrides in-process cache policy for this operation. + use_global_cache (bool): Specifies whether to store entities in + global cache; overrides global cache policy for this operation. + use_datastore (bool): Specifies whether to store entities in + Datastore; overrides Datastore policy for this operation. + global_cache_timeout (int): Maximum lifetime for entities in global + cache; overrides global cache timeout policy for this + operation. + use_memcache (bool): DEPRECATED: Synonym for ``use_global_cache``. + memcache_timeout (int): DEPRECATED: Synonym for + ``global_cache_timeout``. + max_memcache_items (int): No longer supported. + read_policy: DEPRECATED: Synonym for ``read_consistency``. + force_writes (bool): No longer supported. + + Returns: + Union[:class:`.Model`, :data:`None`] + """ + return self.get_async(_options=_options).result() + + @_options.ReadOptions.options + @utils.positional(1) + def get_async( + self, + read_consistency=None, + read_policy=None, + transaction=None, + retries=None, + timeout=None, + deadline=None, + use_cache=None, + use_global_cache=None, + use_datastore=None, + global_cache_timeout=None, + use_memcache=None, + memcache_timeout=None, + max_memcache_items=None, + force_writes=None, + _options=None, + ): + """Asynchronously get the entity for this key. + + The result for the returned future will either be the retrieved + :class:`.Model` or :data:`None` if there is no such entity. + + Args: + read_consistency: Set this to ``ndb.EVENTUAL`` if, instead of + waiting for the Datastore to finish applying changes to all + returned results, you wish to get possibly-not-current results + faster. 
You can't do this if using a transaction. + transaction (bytes): Any results returned will be consistent with + the Datastore state represented by this transaction id. + Defaults to the currently running transaction. Cannot be used + with ``read_consistency=ndb.EVENTUAL``. + retries (int): Number of times to retry this operation in the case + of transient server errors. Operation will potentially be tried + up to ``retries`` + 1 times. Set to ``0`` to try operation only + once, with no retries. + timeout (float): Override the gRPC timeout, in seconds. + deadline (float): DEPRECATED: Synonym for ``timeout``. + use_cache (bool): Specifies whether to store entities in in-process + cache; overrides in-process cache policy for this operation. + use_global_cache (bool): Specifies whether to store entities in + global cache; overrides global cache policy for this operation. + use_datastore (bool): Specifies whether to store entities in + Datastore; overrides Datastore policy for this operation. + global_cache_timeout (int): Maximum lifetime for entities in global + cache; overrides global cache timeout policy for this + operation. + use_memcache (bool): DEPRECATED: Synonym for ``use_global_cache``. + memcache_timeout (int): DEPRECATED: Synonym for + ``global_cache_timeout``. + max_memcache_items (int): No longer supported. + read_policy: DEPRECATED: Synonym for ``read_consistency``. + force_writes (bool): No longer supported. + + Returns: + :class:`~google.cloud.ndb.tasklets.Future` + """ + # Avoid circular import in Python 2.7 + from google.cloud.ndb import model + from google.cloud.ndb import context as context_module + from google.cloud.ndb import _datastore_api + + cls = model.Model._kind_map.get(self.kind()) + + if cls: + cls._pre_get_hook(self) + + @tasklets.tasklet + def get(): + context = context_module.get_context() + use_cache = context._use_cache(self, _options) + + if use_cache: + try: + # This result may be None, if None is cached for this key. + result = context.cache.get_and_validate(self) + except KeyError: + pass + else: + raise tasklets.Return(result) + + entity_pb = yield _datastore_api.lookup(self._key, _options) + if entity_pb is not _datastore_api._NOT_FOUND: + result = model._entity_from_protobuf(entity_pb) + else: + result = None + + if use_cache: + context.cache[self] = result + + raise tasklets.Return(result) + + future = get() + if cls: + future.add_done_callback(functools.partial(cls._post_get_hook, self)) + return future + + @_options.Options.options + @utils.positional(1) + def delete( + self, + retries=None, + timeout=None, + deadline=None, + use_cache=None, + use_global_cache=None, + use_datastore=None, + global_cache_timeout=None, + use_memcache=None, + memcache_timeout=None, + max_memcache_items=None, + force_writes=None, + _options=None, + ): + """Synchronously delete the entity for this key. + + This is a no-op if no such entity exists. + + Note: + If in a transaction, the entity can only be deleted at transaction + commit time. In that case, this function will schedule the entity + to be deleted as part of the transaction and will return + immediately, which is effectively the same as calling + :meth:`delete_async` and ignoring the returned future. If not in a + transaction, this function will block synchronously until the + entity is deleted, as one would expect. + + Args: + timeout (float): Override the gRPC timeout, in seconds. + deadline (float): DEPRECATED: Synonym for ``timeout``. 
+ use_cache (bool): Specifies whether to store entities in in-process + cache; overrides in-process cache policy for this operation. + use_global_cache (bool): Specifies whether to store entities in + global cache; overrides global cache policy for this operation. + use_datastore (bool): Specifies whether to store entities in + Datastore; overrides Datastore policy for this operation. + global_cache_timeout (int): Maximum lifetime for entities in global + cache; overrides global cache timeout policy for this + operation. + use_memcache (bool): DEPRECATED: Synonym for ``use_global_cache``. + memcache_timeout (int): DEPRECATED: Synonym for + ``global_cache_timeout``. + max_memcache_items (int): No longer supported. + force_writes (bool): No longer supported. + """ + # Avoid circular import in Python 2.7 + from google.cloud.ndb import _transaction + + future = self.delete_async(_options=_options) + if not _transaction.in_transaction(): + return future.result() + + @_options.Options.options + @utils.positional(1) + def delete_async( + self, + retries=None, + timeout=None, + deadline=None, + use_cache=None, + use_global_cache=None, + use_datastore=None, + global_cache_timeout=None, + use_memcache=None, + memcache_timeout=None, + max_memcache_items=None, + force_writes=None, + _options=None, + ): + """Schedule deletion of the entity for this key. + + The result of the returned future becomes available once the + deletion is complete. In all cases the future's result is :data:`None` + (i.e. there is no way to tell whether the entity existed or not). + + Args: + timeout (float): Override the gRPC timeout, in seconds. + deadline (float): DEPRECATED: Synonym for ``timeout``. + use_cache (bool): Specifies whether to store entities in in-process + cache; overrides in-process cache policy for this operation. + use_global_cache (bool): Specifies whether to store entities in + global cache; overrides global cache policy for this operation. + use_datastore (bool): Specifies whether to store entities in + Datastore; overrides Datastore policy for this operation. + global_cache_timeout (int): Maximum lifetime for entities in global + cache; overrides global cache timeout policy for this + operation. + use_memcache (bool): DEPRECATED: Synonym for ``use_global_cache``. + memcache_timeout (int): DEPRECATED: Synonym for + ``global_cache_timeout``. + max_memcache_items (int): No longer supported. + force_writes (bool): No longer supported. + """ + # Avoid circular import in Python 2.7 + from google.cloud.ndb import model + from google.cloud.ndb import context as context_module + from google.cloud.ndb import _datastore_api + + cls = model.Model._kind_map.get(self.kind()) + if cls: + cls._pre_delete_hook(self) + + @tasklets.tasklet + def delete(): + result = yield _datastore_api.delete(self._key, _options) + + context = context_module.get_context() + if context._use_cache(self, _options): + context.cache[self] = None + + raise tasklets.Return(result) + + future = delete() + + if cls: + future.add_done_callback(functools.partial(cls._post_delete_hook, self)) + + return future + + @classmethod + def from_old_key(cls, old_key): + """Factory constructor to convert from an "old"-style datastore key. + + The ``old_key`` was expected to be a ``google.appengine.ext.db.Key`` + (which was an alias for ``google.appengine.api.datastore_types.Key``). + + However, the ``google.appengine.ext.db`` module was part of the legacy + Google App Engine runtime and is not generally available. + + Raises: + NotImplementedError: Always. 
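+
+        If migration is needed, a rough sketch (assuming the legacy key's
+        ``to_path()`` helper from the old runtime; it is not part of this
+        API)::
+
+            new_key = ndb.Key(*old_key.to_path())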
+ """ + raise NotImplementedError(_NO_LEGACY) + + def to_old_key(self): + """Convert to an "old"-style datastore key. + + See :meth:`from_old_key` for more information on why this method + is not supported. + + Raises: + NotImplementedError: Always. + """ + raise NotImplementedError(_NO_LEGACY) + + +def _project_from_app(app, allow_empty=False): + """Convert a legacy Google App Engine app string to a project. + + Args: + app (str): The application value to be used. If the caller passes + :data:`None` and ``allow_empty`` is :data:`False`, then this will + use the project set by the current client context. (See + :meth:`~client.Client.context`.) + allow_empty (bool): Flag determining if an empty (i.e. :data:`None`) + project is allowed. Defaults to :data:`False`. + + Returns: + str: The cleaned project. + """ + # Avoid circular import in Python 2.7 + from google.cloud.ndb import context as context_module + + if app is None: + if allow_empty: + return None + client = context_module.get_context().client + app = client.project + + # NOTE: This is the same behavior as in the helper + # ``google.cloud.datastore.key._clean_app()``. + parts = app.split("~", 1) + return parts[-1] + + +def _from_reference(reference, app, namespace, database): + """Convert Reference protobuf to :class:`~google.cloud.datastore.key.Key`. + + This is intended to work with the "legacy" representation of a + datastore "Key" used within Google App Engine (a so-called + "Reference"). This assumes that ``serialized`` was created within an App + Engine app via something like ``ndb.Key(...).reference()``. + + However, the actual type used here is different since this code will not + run in the App Engine standard environment where the type was + ``google.appengine.datastore.entity_pb.Reference``. + + Args: + serialized (bytes): A reference protobuf serialized to bytes. + app (Optional[str]): The application ID / project ID for the + constructed key. + namespace (Optional[str]): The namespace for the constructed key. + database (Optional[str]): The database for the constructed key. + + Returns: + google.cloud.datastore.key.Key: The key corresponding to + ``serialized``. + + Raises: + RuntimeError: If ``app`` is not :data:`None`, but not the same as + ``reference.app``. + RuntimeError: If ``database`` is not :data:`None`, but not the same as + ``reference.database_id``. + RuntimeError: If ``namespace`` is not :data:`None`, but not the same as + ``reference.name_space``. + """ + project = _project_from_app(reference.app) + if app is not None: + if _project_from_app(app) != project: + raise RuntimeError(_REFERENCE_APP_MISMATCH.format(reference.app, app)) + + parsed_database = _key_module._get_empty(reference.database_id, "") + if database is not None: + if database != parsed_database: + raise RuntimeError( + _REFERENCE_DATABASE_MISMATCH.format(reference.database_id, database) + ) + + parsed_namespace = _key_module._get_empty(reference.name_space, "") + if namespace is not None: + if namespace != parsed_namespace: + raise RuntimeError( + _REFERENCE_NAMESPACE_MISMATCH.format(reference.name_space, namespace) + ) + + flat_path = _key_module._get_flat_path(reference.path) + return google.cloud.datastore.Key( + *flat_path, + project=project, + database=parsed_database, + namespace=parsed_namespace, + ) + + +def _from_serialized(serialized, app, namespace, database): + """Convert serialized protobuf to :class:`~google.cloud.datastore.key.Key`. 
+ + This is intended to work with the "legacy" representation of a + datastore "Key" used within Google App Engine (a so-called + "Reference"). This assumes that ``serialized`` was created within an App + Engine app via something like ``ndb.Key(...).serialized()``. + + Args: + serialized (bytes): A reference protobuf serialized to bytes. + app (Optional[str]): The application ID / project ID for the + constructed key. + namespace (Optional[str]): The namespace for the constructed key. + database (Optional[str]): The database for the constructed key. + + Returns: + Tuple[google.cloud.datastore.key.Key, .Reference]: The key + corresponding to ``serialized`` and the Reference protobuf. + """ + reference = _app_engine_key_pb2.Reference() + reference.ParseFromString(serialized) + return _from_reference(reference, app, namespace, database), reference + + +def _from_urlsafe(urlsafe, app, namespace, database): + """Convert urlsafe string to :class:`~google.cloud.datastore.key.Key`. + + .. note:: + + This is borrowed from + :meth:`~google.cloud.datastore.key.Key.from_legacy_urlsafe`. + It is provided here, rather than calling that method, since component + parts need to be re-used. + + This is intended to work with the "legacy" representation of a + datastore "Key" used within Google App Engine (a so-called + "Reference"). This assumes that ``urlsafe`` was created within an App + Engine app via something like ``ndb.Key(...).urlsafe()``. + + Args: + urlsafe (Union[bytes, str]): The base64 encoded (ASCII) string + corresponding to a datastore "Key" / "Reference". + app (Optional[str]): The application ID / project ID for the + constructed key. + namespace (Optional[str]): The namespace for the constructed key. + database (Optional[str]): The database for the constructed key. + + Returns: + Tuple[google.cloud.datastore.key.Key, .Reference]: The key + corresponding to ``urlsafe`` and the Reference protobuf. + """ + if isinstance(urlsafe, str): # pragma: NO BRANCH + urlsafe = urlsafe.encode("ascii") + padding = b"=" * (-len(urlsafe) % 4) + urlsafe += padding + raw_bytes = base64.urlsafe_b64decode(urlsafe) + return _from_serialized(raw_bytes, app, namespace, database) + + +def _constructor_handle_positional(path_args, kwargs): + """Properly handle positional arguments to Key constructor. + + This will modify ``kwargs`` in a few cases: + + * The constructor was called with a dictionary as the only + positional argument (and no keyword arguments were passed). In + this case, the contents of the dictionary passed in will be copied + into ``kwargs``. + * The constructor was called with at least one (non-dictionary) + positional argument. In this case all of the positional arguments + will be added to ``kwargs`` for the key ``flat``. + + Args: + path_args (Tuple): The positional arguments. + kwargs (Dict[str, Any]): The keyword arguments. + + Raises: + TypeError: If keyword arguments were used while the first and + only positional argument was a dictionary. + TypeError: If positional arguments were provided and the keyword + ``flat`` was used. + """ + if not path_args: + return + + if len(path_args) == 1 and isinstance(path_args[0], dict): + if kwargs: + raise TypeError( + "Key() takes no keyword arguments when a dict is the " + "first and only non-keyword argument (for " + "unpickling)." + ) + kwargs.update(path_args[0]) + else: + if "flat" in kwargs: + raise TypeError( + "Key() with positional arguments " + "cannot accept flat as a keyword argument."
+ ) + kwargs["flat"] = path_args + + +def _exactly_one_specified(*values): + """Make sure exactly one of ``values`` is truthy. + + Args: + values (Tuple[Any, ...]): Some values to be checked. + + Returns: + bool: Indicating if exactly one of ``values`` was truthy. + """ + count = sum(1 for value in values if value) + return count == 1 + + +def _parse_from_ref( + klass, + reference=None, + serialized=None, + urlsafe=None, + app=None, + namespace=None, + database: str = None, + **kwargs +): + """Construct a key from a Reference. + + This makes sure that **exactly** one of ``reference``, ``serialized`` and + ``urlsafe`` is specified (all three are different representations of a + ``Reference`` protobuf). + + Args: + klass (type): The class of the instance being constructed. It must + be :class:`.Key`; we do not allow constructing :class:`.Key` + subclasses from a serialized Reference protobuf. + reference (Optional[\ + ~google.cloud.datastore._app_engine_key_pb2.Reference]): A + reference protobuf representing a key. + serialized (Optional[bytes]): A reference protobuf serialized to bytes. + urlsafe (Optional[bytes]): A reference protobuf serialized to bytes. The + raw bytes are then converted to a websafe base64-encoded string. + app (Optional[str]): The Google Cloud Platform project (previously + on Google App Engine, this was called the Application ID). + namespace (Optional[str]): The namespace for the key. + database (Optional[str]): The database for the Key. + kwargs (Dict[str, Any]): Any extra keyword arguments not covered by + the explicitly provided ones. These are passed through to indicate + to the user that the wrong combination of arguments was used, e.g. + if ``parent`` and ``urlsafe`` were used together. + + Returns: + Tuple[~.datastore.Key, \ + ~google.cloud.datastore._app_engine_key_pb2.Reference]: + A pair of the constructed key and the reference that was serialized + in one of the arguments. + + Raises: + TypeError: If ``klass`` is not :class:`.Key`. + TypeError: If ``kwargs`` isn't empty. + TypeError: If any number other than exactly one of ``reference``, + ``serialized`` or ``urlsafe`` is provided. + """ + if klass is not Key: + raise TypeError(_WRONG_TYPE.format(klass)) + + if kwargs or not _exactly_one_specified(reference, serialized, urlsafe): + raise TypeError( + "Cannot construct Key reference from incompatible " "keyword arguments." + ) + + if reference: + ds_key = _from_reference(reference, app, namespace, database) + elif serialized: + ds_key, reference = _from_serialized(serialized, app, namespace, database) + else: + # NOTE: We know here that ``urlsafe`` is truth-y; + # ``_exactly_one_specified()`` guarantees this. + ds_key, reference = _from_urlsafe(urlsafe, app, namespace, database) + + return ds_key, reference + + +def _parse_from_args( + pairs=None, + flat=None, + project=None, + app=None, + namespace=UNDEFINED, + parent=None, + database=UNDEFINED, +): + """Construct a key from the path (and possibly a parent key). + + Args: + pairs (Optional[Iterable[Tuple[str, Union[str, int]]]]): An iterable + of (kind, ID) pairs. + flat (Optional[Iterable[Union[str, int]]]): An iterable of the + (kind, ID) pairs but flattened into a single value. For example, + the pairs ``[("Parent", 1), ("Child", "a")]`` would be flattened to + ``["Parent", 1, "Child", "a"]``. + project (Optional[str]): The Google Cloud Platform project (previously + on Google App Engine, this was called the Application ID). + app (Optional[str]): DEPRECATED: Synonym for ``project``. 
+ namespace (Optional[str]): The namespace for the key. + parent (Optional[~.ndb.key.Key]): The parent of the key being + constructed. If provided, the key path will be **relative** to the + parent key's path. + database (Optional[str]): The database for the key. + Defaults to the parent key's database if a parent was specified, and + to the client's database if it was not. + + Returns: + ~.datastore.Key: The constructed key. + + Raises: + exceptions.BadValueError: If ``parent`` is passed but is not a ``Key``. + """ + # Avoid circular import in Python 2.7 + from google.cloud.ndb import context as context_module + + flat = _get_path(flat, pairs) + _clean_flat_path(flat) + + if project and app: + raise TypeError("Can't specify both 'project' and 'app'. They are synonyms.") + elif not app: + app = project + + parent_ds_key = None + if parent is None: + project = _project_from_app(app) + + if namespace is UNDEFINED: + namespace = context_module.get_context().get_namespace() + + if database is UNDEFINED: + database = context_module.get_context().client.database + + else: + project = _project_from_app(app, allow_empty=True) + if not isinstance(parent, Key): + raise exceptions.BadValueError( + "Expected Key instance, got {!r}".format(parent) + ) + + if namespace is UNDEFINED: + namespace = None + + if database is UNDEFINED: + database = None + + # Offload verification of parent to ``google.cloud.datastore.Key()``. + parent_ds_key = parent._key + + if database == "": + database = None + + if namespace == "": + namespace = None + + return google.cloud.datastore.Key( + *flat, + parent=parent_ds_key, + project=project, + database=database, + namespace=namespace, + ) + + +def _get_path(flat, pairs): + """Get a flat path of key arguments. + + Uses exactly one of ``flat`` or ``pairs``. + + Args: + pairs (Optional[Iterable[Tuple[str, Union[str, int]]]]): An iterable + of (kind, ID) pairs. + flat (Optional[Iterable[Union[str, int]]]): An iterable of the + (kind, ID) pairs but flattened into a single value. For example, + the pairs ``[("Parent", 1), ("Child", "a")]`` would be flattened to + ``["Parent", 1, "Child", "a"]``. + + Returns: + List[Union[str, int]]: The flattened path as a list. + + Raises: + TypeError: If both ``flat`` and ``pairs`` are provided. + ValueError: If the ``flat`` path does not have an even number of + elements. + TypeError: If the paths are both empty. + """ + if flat: + if pairs is not None: + raise TypeError("Key() cannot accept both flat and pairs arguments.") + if len(flat) % 2: + raise ValueError("Key() must have an even number of positional arguments.") + flat = list(flat) + else: + flat = [] + for kind, id_ in pairs: + flat.extend((kind, id_)) + + if not flat: + raise TypeError("Key must consist of at least one pair.") + + return flat + + +def _clean_flat_path(flat): + """Verify and convert the flat path for a key. + + This may modify ``flat`` in place. In particular, if the last element is + :data:`None` (for a partial key), this will pop it off the end. Also, + if some of the kinds are :class:`.Model` subclasses, they will be + converted to their kind strings in ``flat``. + + Args: + flat (List[Union[str, int]]): The flattened path as a list. + + Raises: + TypeError: If the kind in a pair is an invalid type. + exceptions.BadArgumentError: If a key ID is :data:`None` (indicating a partial + key), but in a pair other than the last one. + TypeError: If a key ID is not a string or integer. + """ + # Verify the inputs in ``flat``.
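+    # ``flat`` alternates kind and ID: even indices hold kinds (strings or
+    # Model subclasses), odd indices hold IDs (str or int; None is allowed
+    # only in the final position, marking a partial key).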
+ for i in range(0, len(flat), 2): + # Make sure the ``kind`` is either a string or a Model. + kind = flat[i] + if isinstance(kind, type): + kind = kind._get_kind() + flat[i] = kind + if not isinstance(kind, str): + raise TypeError( + "Key kind must be a string or Model class; " + "received {!r}".format(kind) + ) + # Make sure the ``id_`` is either a string or int. In the special case + # of a partial key, ``id_`` can be ``None`` for the last pair. + id_ = flat[i + 1] + if id_ is None: + if i + 2 < len(flat): + raise exceptions.BadArgumentError("Incomplete Key entry must be last") + elif not isinstance(id_, (str, int)): + raise TypeError(_INVALID_ID_TYPE.format(id_)) + + # Remove trailing ``None`` for a partial key. + if flat[-1] is None: + flat.pop() + + +def _verify_path_value(value, is_str, is_kind=False): + """Verify a key path value: one of a kind, string ID or integer ID. + + Args: + value (Union[str, int]): The value to verify. + is_str (bool): Flag indicating if the ``value`` is a string. If + :data:`False`, then the ``value`` is assumed to be an integer. + is_kind (Optional[bool]): Flag indicating if the value is meant to + be a kind. Defaults to :data:`False`. + + Returns: + Union[str, int]: The ``value`` passed in, if it passed verification + checks. + + Raises: + ValueError: If the ``value`` is a ``str`` for the kind, but the number + of UTF-8 encoded bytes is outside of the range ``[1, 1500]``. + ValueError: If the ``value`` is a ``str`` for the name, but the number + of UTF-8 encoded bytes is outside of the range ``[1, 1500]``. + ValueError: If the ``value`` is an integer but lies outside of the + range ``[1, 2^63 - 1]``. + """ + if is_str: + if 1 <= len(value.encode("utf-8")) <= _MAX_KEYPART_BYTES: + return value + + if is_kind: + raise ValueError(_BAD_KIND.format(_MAX_KEYPART_BYTES, value)) + else: + raise ValueError(_BAD_STRING_ID.format(_MAX_KEYPART_BYTES, value)) + else: + if 1 <= value <= _MAX_INTEGER_ID: + return value + + raise ValueError(_BAD_INTEGER_ID.format(value)) + + +def _to_legacy_path(dict_path): + """Convert a structured path into a legacy "Path". + + .. note:: + + This assumes, but does not verify, that each entry in + ``dict_path`` is valid (i.e. doesn't have more than one + key out of "name" / "id"). + + Args: + dict_path (Iterable[Dict[str, Union[str, int]]]): The "structured" + path for a ``google-cloud-datastore`` key, i.e. it is a list of + dictionaries, each of which has "kind" and one of "name" / "id" as + keys. + + Returns: + _app_engine_key_pb2.Path: The legacy path corresponding to + ``dict_path``. + """ + elements = [] + for part in dict_path: + element_kwargs = {"type": _verify_path_value(part["kind"], True, is_kind=True)} + if "id" in part: + element_kwargs["id"] = _verify_path_value(part["id"], False) + elif "name" in part: + element_kwargs["name"] = _verify_path_value(part["name"], True) + element = _app_engine_key_pb2.Path.Element(**element_kwargs) + elements.append(element) + + return _app_engine_key_pb2.Path(element=elements) diff --git a/packages/google-cloud-ndb/google/cloud/ndb/metadata.py b/packages/google-cloud-ndb/google/cloud/ndb/metadata.py new file mode 100644 index 000000000000..d9fc40d685b7 --- /dev/null +++ b/packages/google-cloud-ndb/google/cloud/ndb/metadata.py @@ -0,0 +1,371 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Models and helper functions for access to a project's datastore metadata. + +These entities cannot be created by users, but are created as the results of +__namespace__, __kind__, __property__ and __entity_group__ metadata queries +or gets. + +A simplified API is also offered: + + :func:`get_namespaces`: A list of namespace names. + + :func:`get_kinds`: A list of kind names. + + :func:`get_properties_of_kind`: A list of property names + for the given kind name. + + :func:`get_representations_of_kind`: A dict mapping + property names to lists of representation ids. + + get_kinds(), get_properties_of_kind(), get_representations_of_kind() + implicitly apply to the current namespace. + + get_namespaces(), get_kinds(), get_properties_of_kind(), + get_representations_of_kind() have optional start and end arguments to + limit the query to a range of names, such that start <= name < end. +""" + +from google.cloud.ndb import exceptions +from google.cloud.ndb import model +from google.cloud.ndb import query as query_module + + +__all__ = [ + "get_entity_group_version", + "get_kinds", + "get_namespaces", + "get_properties_of_kind", + "get_representations_of_kind", + "EntityGroup", + "Kind", + "Namespace", + "Property", +] + + +class _BaseMetadata(model.Model): + """Base class for all metadata models.""" + + _use_cache = False + _use_global_cache = False + + KIND_NAME = "" + + def __new__(cls, *args, **kwargs): + """override to prevent instantiation""" + if cls is _BaseMetadata: + raise TypeError("This base class cannot be instantiated") + return super(_BaseMetadata, cls).__new__(cls) + + @classmethod + def _get_kind(cls): + """Kind name override.""" + return cls.KIND_NAME + + +class Namespace(_BaseMetadata): + """Model for __namespace__ metadata query results.""" + + KIND_NAME = "__namespace__" + EMPTY_NAMESPACE_ID = 1 + + @property + def namespace_name(self): + """Return the namespace name specified by this entity's key. + + Returns: + str: the namespace name. + """ + return self.key_to_namespace(self.key) + + @classmethod + def key_for_namespace(cls, namespace): + """Return the Key for a namespace. + + Args: + namespace (str): A string giving the namespace whose key is + requested. + + Returns: + key.Key: The Key for the namespace. + """ + if namespace is not None: + return model.Key(cls.KIND_NAME, namespace) + else: + return model.Key(cls.KIND_NAME, cls.EMPTY_NAMESPACE_ID) + + @classmethod + def key_to_namespace(cls, key): + """Return the namespace specified by a given __namespace__ key. + + Args: + key (key.Key): key whose name is requested. + + Returns: + str: The namespace specified by key. + """ + return key.string_id() or "" + + +class Kind(_BaseMetadata): + """Model for __kind__ metadata query results.""" + + KIND_NAME = "__kind__" + + @property + def kind_name(self): + """Return the kind name specified by this entity's key. + + Returns: + str: the kind name. + """ + return self.key_to_kind(self.key) + + @classmethod + def key_for_kind(cls, kind): + """Return the __kind__ key for kind. + + Args: + kind (str): kind whose key is requested. + + Returns: + key.Key: key for kind. 
+ """ + return model.Key(cls.KIND_NAME, kind) + + @classmethod + def key_to_kind(cls, key): + """Return the kind specified by a given __kind__ key. + + Args: + key (key.Key): key whose name is requested. + + Returns: + str: The kind specified by key. + """ + return key.id() + + +class Property(_BaseMetadata): + """Model for __property__ metadata query results.""" + + KIND_NAME = "__property__" + + @property + def property_name(self): + """Return the property name specified by this entity's key. + + Returns: + str: the property name. + """ + return self.key_to_property(self.key) + + @property + def kind_name(self): + """Return the kind name specified by this entity's key. + + Returns: + str: the kind name. + """ + return self.key_to_kind(self.key) + + property_representation = model.StringProperty(repeated=True) + + @classmethod + def key_for_kind(cls, kind): + """Return the __property__ key for kind. + + Args: + kind (str): kind whose key is requested. + + Returns: + key.Key: The parent key for __property__ keys of kind. + """ + return model.Key(Kind.KIND_NAME, kind) + + @classmethod + def key_for_property(cls, kind, property): + """Return the __property__ key for property of kind. + + Args: + kind (str): kind whose key is requested. + property (str): property whose key is requested. + + Returns: + key.Key: The key for property of kind. + """ + return model.Key(Kind.KIND_NAME, kind, Property.KIND_NAME, property) + + @classmethod + def key_to_kind(cls, key): + """Return the kind specified by a given __property__ key. + + Args: + key (key.Key): key whose kind name is requested. + + Returns: + str: The kind specified by key. + """ + if key.kind() == Kind.KIND_NAME: + return key.id() + else: + return key.parent().id() + + @classmethod + def key_to_property(cls, key): + """Return the property specified by a given __property__ key. + + Args: + key (key.Key): key whose property name is requested. + + Returns: + str: property specified by key, or None if the key specified + only a kind. + """ + if key.kind() == Kind.KIND_NAME: + return None + else: + return key.id() + + +class EntityGroup(object): + """Model for __entity_group__ metadata. No longer supported by datastore.""" + + def __new__(self, *args, **kwargs): + raise exceptions.NoLongerImplementedError() + + +def get_entity_group_version(*args, **kwargs): + """Return the version of the entity group containing key. + + Raises: + :class:google.cloud.ndb.exceptions.NoLongerImplementedError. Always. + This method is not supported anymore. + """ + raise exceptions.NoLongerImplementedError() + + +def get_kinds(start=None, end=None): + """Return all kinds in the specified range, for the current namespace. + + Args: + start (str): only return kinds >= start if start is not None. + end (str): only return kinds < end if end is not None. + + Returns: + List[str]: Kind names between the (optional) start and end values. + """ + # This is required for the query to find the model for __kind__ + Kind._fix_up_properties() + + query = query_module.Query(kind=Kind._get_kind()) + if start is not None and start != "": + query = query.filter(Kind.key >= Kind.key_for_kind(start)) + if end is not None: + if end == "": + return [] + query = query.filter(Kind.key < Kind.key_for_kind(end)) + + results = query.fetch() + return [result.kind_name for result in results] + + +def get_namespaces(start=None, end=None): + """Return all namespaces in the specified range. + + Args: + start (str): only return namespaces >= start if start is not None. 
+ end (str): only return namespaces < end if end is not None. + + Returns: + List[str]: Namespace names between the (optional) start and end values. + """ + # This is required for the query to find the model for __namespace__ + Namespace._fix_up_properties() + + query = query_module.Query(kind=Namespace._get_kind()) + if start is not None: + query = query.filter(Namespace.key >= Namespace.key_for_namespace(start)) + if end is not None: + query = query.filter(Namespace.key < Namespace.key_for_namespace(end)) + + results = query.fetch() + return [result.namespace_name for result in results] + + +def get_properties_of_kind(kind, start=None, end=None): + """Return all properties of kind in the specified range. + + NOTE: This function does not return unindexed properties. + + Args: + kind (str): name of kind whose properties you want. + start (str): only return properties >= start if start is not None. + end (str): only return properties < end if end is not None. + + Returns: + List[str]: Property names of kind between the (optional) start and end + values. + """ + # This is required for the query to find the model for __property__ + Property._fix_up_properties() + + query = query_module.Query( + kind=Property._get_kind(), ancestor=Property.key_for_kind(kind) + ) + if start is not None and start != "": + query = query.filter(Property.key >= Property.key_for_property(kind, start)) + if end is not None: + if end == "": + return [] + query = query.filter(Property.key < Property.key_for_property(kind, end)) + + results = query.fetch() + return [prop.property_name for prop in results] + + +def get_representations_of_kind(kind, start=None, end=None): + """Return all representations of properties of kind in the specified range. + + NOTE: This function does not return unindexed properties. + + Args: + kind: name of kind whose properties you want. + start: only return properties >= start if start is not None. + end: only return properties < end if end is not None. + + Returns: + dict: map of property names to their list of representations. + """ + # This is required for the query to find the model for __property__ + Property._fix_up_properties() + + query = query_module.Query( + kind=Property._get_kind(), ancestor=Property.key_for_kind(kind) + ) + if start is not None and start != "": + query = query.filter(Property.key >= Property.key_for_property(kind, start)) + if end is not None: + if end == "": + return {} + query = query.filter(Property.key < Property.key_for_property(kind, end)) + + representations = {} + results = query.fetch() + for property in results: + representations[property.property_name] = property.property_representation + + return representations diff --git a/packages/google-cloud-ndb/google/cloud/ndb/model.py b/packages/google-cloud-ndb/google/cloud/ndb/model.py new file mode 100644 index 000000000000..c4d3cdb66ed4 --- /dev/null +++ b/packages/google-cloud-ndb/google/cloud/ndb/model.py @@ -0,0 +1,6689 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Model classes for datastore objects and properties for models. + +.. testsetup:: * + + from unittest import mock + from google.cloud import ndb + from google.cloud.ndb import context as context_module + + client = mock.Mock( + project="testing", + database=None, + namespace=None, + stub=mock.Mock(spec=()), + spec=("project", "namespace", "database", "stub"), + ) + context = context_module.Context(client).use() + context.__enter__() + +.. testcleanup:: * + + context.__exit__(None, None, None) + +A model class represents the structure of entities stored in the datastore. +Applications define model classes to indicate the structure of their entities, +then instantiate those model classes to create entities. + +All model classes must inherit (directly or indirectly) from Model. Through +the magic of metaclasses, straightforward assignments in the model class +definition can be used to declare the model's structure:: + + class Person(Model): + name = StringProperty() + age = IntegerProperty() + +We can now create a Person entity and write it to Cloud Datastore:: + + person = Person(name='Arthur Dent', age=42) + key = person.put() + +The return value from put() is a Key (see the documentation for +``ndb/key.py``), which can be used to retrieve the same entity later:: + + person2 = key.get() + person2 == person # Returns True + +To update an entity, simply change its attributes and write it back (note that +this doesn't change the key):: + + person2.name = 'Arthur Philip Dent' + person2.put() + +We can also delete an entity (by using the key):: + + key.delete() + +The property definitions in the class body tell the system the names and the +types of the fields to be stored in Cloud Datastore, whether they must be +indexed, their default value, and more. + +Many different Property types exist. Most are indexed by default, the +exceptions are indicated in the list below: + +- :class:`StringProperty`: a short text string, limited to at most 1500 bytes + (when UTF-8 encoded from :class:`str` to bytes). +- :class:`TextProperty`: an unlimited text string; unindexed. +- :class:`BlobProperty`: an unlimited byte string; unindexed. +- :class:`IntegerProperty`: a 64-bit signed integer. +- :class:`FloatProperty`: a double precision floating point number. +- :class:`BooleanProperty`: a bool value. +- :class:`DateTimeProperty`: a datetime object. Note: Datastore always uses + UTC as the timezone. +- :class:`DateProperty`: a date object. +- :class:`TimeProperty`: a time object. +- :class:`GeoPtProperty`: a geographical location, i.e. (latitude, longitude). +- :class:`KeyProperty`: a Cloud Datastore Key value, optionally constrained to + referring to a specific kind. +- :class:`UserProperty`: a User object (for backwards compatibility only) +- :class:`StructuredProperty`: a field that is itself structured like an + entity; see below for more details. +- :class:`LocalStructuredProperty`: like StructuredProperty but the on-disk + representation is an opaque blob; unindexed. +- :class:`ComputedProperty`: a property whose value is computed from other + properties by a user-defined function. The property value is written to Cloud + Datastore so that it can be used in queries, but the value from Cloud + Datastore is not used when the entity is read back. +- :class:`GenericProperty`: a property whose type is not constrained; mostly + used by the Expando class (see below) but also usable explicitly. 
+- :class:`JsonProperty`: a property whose value is any object that can be + serialized using JSON; the value written to Cloud Datastore is a JSON + representation of that object. +- :class:`PickleProperty`: a property whose value is any object that can be + serialized using Python's pickle protocol; the value written to the Cloud + Datastore is the pickled representation of that object, using the highest + available pickle protocol. + +Most Property classes have similar constructor signatures. They +accept several optional keyword arguments: + +- name=: the name used to store the property value in the datastore. + Unlike the following options, this may also be given as a positional + argument. +- indexed=: indicates whether the property should be indexed (allowing + queries on this property's value). +- repeated=: indicates that this property can have multiple values in + the same entity. +- write_empty_list: For repeated value properties, controls whether a + property with no elements (the empty list) is written to Datastore. If + true, the empty list is written; if false, nothing is written to Datastore. +- required=: indicates that this property must be given a value. +- default=: a default value if no explicit value is given. +- choices=: a list or tuple of allowable values. +- validator=: a general-purpose validation function. It will be + called with two arguments (prop, value) and should either return the + validated value or raise an exception. It is also allowed for the function + to modify the value, but the function should be idempotent. For example: a + validator that returns value.strip() or value.lower() is fine, but one that + returns value + '$' is not. +- verbose_name=: A human readable name for this property. This human + readable name can be used for HTML form labels. + +The repeated and required/default options are mutually exclusive: a repeated +property cannot be required, nor can it specify a default value (the default is +always an empty list, and an empty list is always an allowed value), but a +required property can have a default. + +Some property types have additional arguments. Some property types do not +support all options. + +Repeated properties are always represented as Python lists; if there is only +one value, the list has only one element. When a new list is assigned to a +repeated property, all elements of the list are validated. Since it is also +possible to mutate lists in place, repeated properties are re-validated before +they are written to the datastore. + +No validation happens when an entity is read from Cloud Datastore; however, +property values read that have the wrong type (e.g. a string value for an +IntegerProperty) are ignored. + +For non-repeated properties, None is always a possible value, and no validation +is called when the value is set to None. However, for required properties, +writing the entity to Cloud Datastore requires the value to be something other +than None (and valid). + +The StructuredProperty is different from most other properties; it lets you +define a sub-structure for your entities. The substructure itself is defined +using a model class, and the attribute value is an instance of that model +class. However, it is not stored in the datastore as a separate entity; +instead, its attribute values are included in the parent entity using a naming +convention (the name of the structured attribute followed by a dot followed by +the name of the subattribute).
For example:: + + class Address(Model): + street = StringProperty() + city = StringProperty() + + class Person(Model): + name = StringProperty() + address = StructuredProperty(Address) + + p = Person(name='Harry Potter', + address=Address(street='4 Privet Drive', + city='Little Whinging')) + k = p.put() + +This would write a single 'Person' entity with three attributes (as you could +verify using the Datastore Viewer in the Admin Console):: + + name = 'Harry Potter' + address.street = '4 Privet Drive' + address.city = 'Little Whinging' + +Structured property types can be nested arbitrarily deep, but in a hierarchy of +nested structured property types, only one level can have the repeated flag +set. It is fine to have multiple structured properties referencing the same +model class. + +It is also fine to use the same model class both as a top-level entity class +and for a structured property; however, queries for the model class will +only return the top-level entities. + +The LocalStructuredProperty works similarly to StructuredProperty on the Python +side. For example:: + + class Address(Model): + street = StringProperty() + city = StringProperty() + + class Person(Model): + name = StringProperty() + address = LocalStructuredProperty(Address) + + p = Person(name='Harry Potter', + address=Address(street='4 Privet Drive', + city='Little Whinging')) + k = p.put() + +However, the data written to Cloud Datastore is different; it writes a 'Person' +entity with a 'name' attribute as before and a single 'address' attribute +whose value is a blob which encodes the Address value (using the standard +"protocol buffer" encoding). + +The Model class offers basic query support. You can create a Query object by +calling the query() class method. Iterating over a Query object returns the +entities matching the query one at a time. Query objects are fully described +in the documentation for query, but there is one handy shortcut that is only +available through Model.query(): positional arguments are interpreted as filter +expressions which are combined through an AND operator. For example:: + + Person.query(Person.name == 'Harry Potter', Person.age >= 11) + +is equivalent to:: + + Person.query().filter(Person.name == 'Harry Potter', Person.age >= 11) + +Keyword arguments passed to .query() are passed along to the Query() +constructor. + +It is possible to query for field values of structured properties. For +example:: + + qry = Person.query(Person.address.city == 'London') + +A number of top-level functions also live in this module: + +- :func:`get_multi` reads multiple entities at once. +- :func:`put_multi` writes multiple entities at once. +- :func:`delete_multi` deletes multiple entities at once. + +All these have a corresponding ``*_async()`` variant as well. The +``*_multi_async()`` functions return a list of Futures. + +There are many other interesting features. For example, Model subclasses may +define pre-call and post-call hooks for most operations (get, put, delete, +allocate_ids), and Property classes may be subclassed to suit various needs. +Documentation for writing a Property subclass is in the docs for the +:class:`Property` class.
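+ +For example, the ``*_multi`` functions batch several operations into a single +call (an illustrative sketch, reusing the ``Person`` model from above):: + + people = [Person(name='Arthur Dent'), Person(name='Ford Prefect')] + keys = put_multi(people) # write both entities in one batch + entities = get_multi(keys) # read them back in one batch + delete_multi(keys) # delete both entities in one batch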
+""" + + +import copy +import datetime +import functools +import inspect +import json +import pickle +import zlib + +import pytz + +from google.cloud.datastore import entity as ds_entity_module +from google.cloud.datastore import helpers +from google.cloud.datastore_v1.types import entity as entity_pb2 + +from google.cloud.ndb import _legacy_entity_pb +from google.cloud.ndb import _datastore_types +from google.cloud.ndb import exceptions +from google.cloud.ndb import key as key_module +from google.cloud.ndb import _options as options_module +from google.cloud.ndb import query as query_module +from google.cloud.ndb import _transaction +from google.cloud.ndb import tasklets +from google.cloud.ndb import utils + + +__all__ = [ + "Key", + "BlobKey", + "GeoPt", + "Rollback", + "KindError", + "InvalidPropertyError", + "BadProjectionError", + "UnprojectedPropertyError", + "ReadonlyPropertyError", + "ComputedPropertyError", + "UserNotFoundError", + "IndexProperty", + "Index", + "IndexState", + "ModelAdapter", + "make_connection", + "ModelAttribute", + "Property", + "ModelKey", + "BooleanProperty", + "IntegerProperty", + "FloatProperty", + "BlobProperty", + "CompressedTextProperty", + "TextProperty", + "StringProperty", + "GeoPtProperty", + "PickleProperty", + "JsonProperty", + "User", + "UserProperty", + "KeyProperty", + "BlobKeyProperty", + "DateTimeProperty", + "DateProperty", + "TimeProperty", + "StructuredProperty", + "LocalStructuredProperty", + "GenericProperty", + "ComputedProperty", + "MetaModel", + "Model", + "Expando", + "get_multi_async", + "get_multi", + "put_multi_async", + "put_multi", + "delete_multi_async", + "delete_multi", + "get_indexes_async", + "get_indexes", +] + + +_MEANING_PREDEFINED_ENTITY_USER = 20 +_MEANING_COMPRESSED = 22 + +_ZLIB_COMPRESSION_MARKERS = ( + # As produced by zlib. Indicates compressed byte sequence using DEFLATE at + # default compression level, with a 32K window size. + # From https://github.com/madler/zlib/blob/master/doc/rfc1950.txt + b"x\x9c", + # Other compression levels produce the following marker. + b"x^", +) + +_MAX_STRING_LENGTH = 1500 +Key = key_module.Key +BlobKey = _datastore_types.BlobKey +GeoPt = helpers.GeoPoint +Rollback = exceptions.Rollback + +_getfullargspec = inspect.getfullargspec + + +class KindError(exceptions.BadValueError): + """Raised when an implementation for a kind can't be found. + + May also be raised when the kind is not a byte string. + """ + + +class InvalidPropertyError(exceptions.Error): + """Raised when a property is not applicable to a given use. + + For example, a property must exist and be indexed to be used in a query's + projection or group by clause. 
+ """ + + +BadProjectionError = InvalidPropertyError +"""This alias for :class:`InvalidPropertyError` is for legacy support.""" + + +class UnprojectedPropertyError(exceptions.Error): + """Raised when getting a property value that's not in the projection.""" + + +class ReadonlyPropertyError(exceptions.Error): + """Raised when attempting to set a property value that is read-only.""" + + +class ComputedPropertyError(ReadonlyPropertyError): + """Raised when attempting to set or delete a computed property.""" + + +class UserNotFoundError(exceptions.Error): + """No email argument was specified, and no user is logged in.""" + + +class _NotEqualMixin(object): + """Mix-in class that implements __ne__ in terms of __eq__.""" + + def __ne__(self, other): + """Implement self != other as not(self == other).""" + eq = self.__eq__(other) + if eq is NotImplemented: + return NotImplemented + return not eq + + +class IndexProperty(_NotEqualMixin): + """Immutable object representing a single property in an index.""" + + @utils.positional(1) + def __new__(cls, name, direction): + instance = super(IndexProperty, cls).__new__(cls) + instance._name = name + instance._direction = direction + return instance + + @property + def name(self): + """str: The property name being indexed.""" + return self._name + + @property + def direction(self): + """str: The direction in the index, ``asc`` or ``desc``.""" + return self._direction + + def __repr__(self): + """Return a string representation.""" + return "{}(name={!r}, direction={!r})".format( + type(self).__name__, self.name, self.direction + ) + + def __eq__(self, other): + """Compare two index properties for equality.""" + if not isinstance(other, IndexProperty): + return NotImplemented + return self.name == other.name and self.direction == other.direction + + def __hash__(self): + return hash((self.name, self.direction)) + + +class Index(_NotEqualMixin): + """Immutable object representing an index.""" + + @utils.positional(1) + def __new__(cls, kind, properties, ancestor): + instance = super(Index, cls).__new__(cls) + instance._kind = kind + instance._properties = properties + instance._ancestor = ancestor + return instance + + @property + def kind(self): + """str: The kind being indexed.""" + return self._kind + + @property + def properties(self): + """List[IndexProperty]: The properties being indexed.""" + return self._properties + + @property + def ancestor(self): + """bool: Indicates if this is an ancestor index.""" + return self._ancestor + + def __repr__(self): + """Return a string representation.""" + return "{}(kind={!r}, properties={!r}, ancestor={})".format( + type(self).__name__, self.kind, self.properties, self.ancestor + ) + + def __eq__(self, other): + """Compare two indexes.""" + if not isinstance(other, Index): + return NotImplemented + + return ( + self.kind == other.kind + and self.properties == other.properties + and self.ancestor == other.ancestor + ) + + def __hash__(self): + return hash((self.kind, self.properties, self.ancestor)) + + +class IndexState(_NotEqualMixin): + """Immutable object representing an index and its state.""" + + @utils.positional(1) + def __new__(cls, definition, state, id): + instance = super(IndexState, cls).__new__(cls) + instance._definition = definition + instance._state = state + instance._id = id + return instance + + @property + def definition(self): + """Index: The index corresponding to the tracked state.""" + return self._definition + + @property + def state(self): + """str: The index state. 
+ + Possible values are ``error``, ``deleting``, ``serving`` or + ``building``. + """ + return self._state + + @property + def id(self): + """int: The index ID.""" + return self._id + + def __repr__(self): + """Return a string representation.""" + return "{}(definition={!r}, state={!r}, id={:d})".format( + type(self).__name__, self.definition, self.state, self.id + ) + + def __eq__(self, other): + """Compare two index states.""" + if not isinstance(other, IndexState): + return NotImplemented + + return ( + self.definition == other.definition + and self.state == other.state + and self.id == other.id + ) + + def __hash__(self): + return hash((self.definition, self.state, self.id)) + + +class ModelAdapter(object): + def __new__(self, *args, **kwargs): + raise exceptions.NoLongerImplementedError() + + +def _entity_from_ds_entity(ds_entity, model_class=None): + """Create an entity from a datastore entity. + + Args: + ds_entity (google.cloud.datastore_v1.types.Entity): An entity to be + deserialized. + model_class (class): Optional; ndb Model class type. + + Returns: + .Model: The deserialized entity. + """ + class_key = ds_entity.get("class") + if class_key: + # If this is a projection query, we'll get multiple entities with + # scalar values rather than single entities with array values. + # It's weird: + # https://cloud.google.com/datastore/docs/concepts/queries#datastore-datastore-array-value-python + if not isinstance(class_key, list): + kind = class_key + else: + kind = class_key[-1] + else: + kind = ds_entity.kind + + model_class = model_class or Model._lookup_model(kind) + entity = model_class() + + if ds_entity.key: + entity._key = key_module.Key._from_ds_key(ds_entity.key) + + for name, value in ds_entity.items(): + # If ``name`` was used to define the property, ds_entity name will not + # match model property name. + name = model_class._code_name_from_stored_name(name) + + prop = getattr(model_class, name, None) + + # Backwards compatibility shim. NDB previously stored structured + # properties as sets of dotted name properties. Datastore now has + # native support for embedded entities and NDB now uses that, by + # default. This handles the case of reading structured properties from + # older NDB datastore instances. + # + # Turns out this is also useful when doing projection queries with + # repeated structured properties, in which case, due to oddities with + # how Datastore handles these things, we'll get a scalar value for the + # subvalue, instead of an array, like you'd expect when just + # marshalling the entity normally (instead of in a projection query). + # + def new_entity(key): + return _BaseValue(ds_entity_module.Entity(key)) + + if prop is None and "." in name: + supername, subname = name.split(".", 1) + # Code name for structured property could be different than stored + # name if ``name`` was set when defined. + supername = model_class._code_name_from_stored_name(supername) + structprop = getattr(model_class, supername, None) + if isinstance(structprop, StructuredProperty): + subvalue = value + value = structprop._get_base_value(entity) + if value in (None, []): # empty list for repeated props + kind = structprop._model_class._get_kind() + key = key_module.Key(kind, None) + if structprop._repeated: + if isinstance(subvalue, list): + # Not a projection + value = [new_entity(key._key) for _ in subvalue] + else: + # Is a projection, so subvalue is scalar. Only need + # one subentity. 
+ value = [new_entity(key._key)] + else: + value = new_entity(key._key) + + structprop._store_value(entity, value) + + if structprop._repeated: + if isinstance(subvalue, list): + # Not a projection + + # In the rare case of using a repeated + # StructuredProperty where the sub-model is an Expando, + # legacy NDB could write repeated properties of + # different lengths for the subproperties, which was a + # bug. We work around this when reading out such values + # by making sure our repeated property is the same + # length as the longest subproperty. + # Make sure to create a key of the same kind as + # the other entries in the value list + while len(subvalue) > len(value): + # Need to make some more subentities + expando_kind = structprop._model_class._get_kind() + expando_key = key_module.Key(expando_kind, None) + value.append(new_entity(expando_key._key)) + + # Branch coverage bug, + # See: https://github.com/nedbat/coveragepy/issues/817 + for subentity, subsubvalue in zip( # pragma no branch + value, subvalue + ): + subentity.b_val.update({subname: subsubvalue}) + else: + # Is a projection, so subvalue is scalar and we only + # have one subentity. + value[0].b_val.update({subname: subvalue}) + else: + value.b_val.update({subname: subvalue}) + + continue + + if prop is None and kind is not None and kind != model_class.__name__: + # kind and model_class name do not match, so this is probably a + # polymodel. We need to check if the prop belongs to the subclass. + model_subclass = Model._lookup_model(kind) + prop = getattr(model_subclass, name, None) + + def base_value_or_none(value): + return None if value is None else _BaseValue(value) + + if not (prop is not None and isinstance(prop, Property)): + if value is not None and isinstance(entity, Expando): # pragma: NO BRANCH + if isinstance(value, list): + value = [base_value_or_none(sub_value) for sub_value in value] + else: + value = _BaseValue(value) + setattr(entity, name, value) + continue # pragma: NO COVER + + if value is not None: + if prop._repeated: + # A repeated property will have a scalar value if this is a + # projection query. + if isinstance(value, list): + # Not a projection + value = [base_value_or_none(sub_value) for sub_value in value] + else: + # Projection + value = [_BaseValue(value)] + + else: + value = _BaseValue(value) + + value = prop._from_datastore(ds_entity, value) + + prop._store_value(entity, value) + + return entity + + +def _entity_from_protobuf(protobuf): + """Deserialize an entity from a protobuffer. + + Args: + protobuf (google.cloud.datastore_v1.types.Entity): An entity protobuf + to be deserialized. + + Returns: + .Model: The deserialized entity. + """ + ds_entity = helpers.entity_from_protobuf(protobuf) + return _entity_from_ds_entity(ds_entity) + + +def _properties_of(*entities): + """Get the model properties for one or more entities. + + After collecting any properties local to the given entities, will traverse the + entities' MRO (class hierarchy) up from the entities' class through all of its + ancestors, collecting any ``Property`` instances defined for those classes. + + Args: + entities (Tuple[model.Model]): The entities to get properties for. All entities + are expected to be of the same class. + + Returns: + Iterator[Property]: Iterator over the entities' properties. 
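+ + For example (an illustrative sketch; ``person`` is an entity of a + hypothetical ``Person`` model with ``name`` and ``age`` properties):: + + names = [prop._name for prop in _properties_of(person)] + # names would contain 'name' and 'age'; the entity's key + # (a ModelKey) is excluded.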
+ """ + seen = set() + + entity_type = type(entities[0]) # assume all entities are same type + for level in entities + tuple(entity_type.mro()): + if not hasattr(level, "_properties"): + continue + + level_properties = getattr(level, "_properties", {}) + for prop in level_properties.values(): + if ( + not isinstance(prop, Property) + or isinstance(prop, ModelKey) + or prop._name in seen + ): + continue + + seen.add(prop._name) + yield prop + + +def _entity_to_ds_entity(entity, set_key=True): + """Convert an NDB entity to Datastore entity. + + Args: + entity (Model): The entity to be converted. + + Returns: + google.cloud.datastore.entity.Entity: The converted entity. + + Raises: + ndb.exceptions.BadValueError: If entity has uninitialized properties. + """ + data = {"_exclude_from_indexes": []} + uninitialized = [] + + for prop in _properties_of(entity): + if not prop._is_initialized(entity): + uninitialized.append(prop._name) + + prop._to_datastore(entity, data) + + if uninitialized: + missing = ", ".join(uninitialized) + raise exceptions.BadValueError( + "Entity has uninitialized properties: {}".format(missing) + ) + + exclude_from_indexes = data.pop("_exclude_from_indexes") + ds_entity = None + if set_key: + key = entity._key + if key is None: + key = key_module.Key(entity._get_kind(), None) + ds_entity = ds_entity_module.Entity( + key._key, exclude_from_indexes=exclude_from_indexes + ) + else: + ds_entity = ds_entity_module.Entity(exclude_from_indexes=exclude_from_indexes) + + # Some properties may need to set meanings for backwards compatibility, + # so we look for them. They are set using the _to_datastore calls above. + meanings = data.pop("_meanings", None) + if meanings is not None: + ds_entity._meanings = meanings + + ds_entity.update(data) + + return ds_entity + + +def _entity_to_protobuf(entity, set_key=True): + """Serialize an entity to a protocol buffer. + + Args: + entity (Model): The entity to be serialized. + + Returns: + google.cloud.datastore_v1.types.Entity: The protocol buffer + representation. Note that some methods are now only + accessible via the `_pb` property. + """ + ds_entity = _entity_to_ds_entity(entity, set_key=set_key) + return helpers.entity_to_protobuf(ds_entity) + + +def make_connection(*args, **kwargs): + raise exceptions.NoLongerImplementedError() + + +class ModelAttribute(object): + """Base for classes that implement a ``_fix_up()`` method.""" + + def _fix_up(self, cls, code_name): + """Fix-up property name. To be implemented by subclasses. + + Args: + cls (type): The model class that owns the property. + code_name (str): The name of the :class:`Property` being fixed up. + """ + + +class _BaseValue(_NotEqualMixin): + """A marker object wrapping a "base type" value. + + This is used to be able to tell whether ``entity._values[name]`` is a + user value (i.e. of a type that the Python code understands) or a + base value (i.e of a type that serialization understands). + User values are unwrapped; base values are wrapped in a + :class:`_BaseValue` instance. + + Args: + b_val (Any): The base value to be wrapped. + + Raises: + TypeError: If ``b_val`` is :data:`None`. + TypeError: If ``b_val`` is a list. + """ + + def __init__(self, b_val): + if b_val is None: + raise TypeError("Cannot wrap None") + if isinstance(b_val, list): + raise TypeError("Lists cannot be wrapped. 
Received", b_val) + self.b_val = b_val + + def __repr__(self): + return "_BaseValue({!r})".format(self.b_val) + + def __eq__(self, other): + """Compare two :class:`_BaseValue` instances.""" + if not isinstance(other, _BaseValue): + return NotImplemented + + return self.b_val == other.b_val + + def __hash__(self): + raise TypeError("_BaseValue is not immutable") + + +class Property(ModelAttribute): + """A class describing a typed, persisted attribute of an entity. + + .. warning:: + + This is not to be confused with Python's ``@property`` built-in. + + .. note:: + + This is just a base class; there are specific subclasses that + describe properties of various types (and :class:`GenericProperty` + which describes a dynamically typed property). + + The :class:`Property` does not reserve any "public" names (i.e. names + that don't start with an underscore). This is intentional; the subclass + :class:`StructuredProperty` uses the public attribute namespace to refer to + nested property names (this is essential for specifying queries on + subproperties). + + The :meth:`IN` attribute is provided as an alias for ``_IN``, but ``IN`` + can be overridden if a subproperty has the same name. + + The :class:`Property` class and its predefined subclasses allow easy + subclassing using composable (or stackable) validation and + conversion APIs. These require some terminology definitions: + + * A **user value** is a value such as would be set and accessed by the + application code using standard attributes on the entity. + * A **base value** is a value such as would be serialized to + and deserialized from Cloud Datastore. + + A property will be a member of a :class:`Model` and will be used to help + store values in an ``entity`` (i.e. instance of a model subclass). The + underlying stored values can be either user values or base values. + + To interact with the composable conversion and validation API, a + :class:`Property` subclass can define + + * ``_to_base_type()`` + * ``_from_base_type()`` + * ``_validate()`` + + These should **not** call their ``super()`` method, since the methods + are meant to be composed. For example with composable validation: + + .. code-block:: python + + class Positive(ndb.IntegerProperty): + def _validate(self, value): + if value < 1: + raise ndb.exceptions.BadValueError("Non-positive", value) + + + class SingleDigit(Positive): + def _validate(self, value): + if value > 9: + raise ndb.exceptions.BadValueError("Multi-digit", value) + + neither ``_validate()`` method calls ``super()``. Instead, when a + ``SingleDigit`` property validates a value, it composes all validation + calls in order: + + * ``SingleDigit._validate`` + * ``Positive._validate`` + * ``IntegerProperty._validate`` + + The API supports "stacking" classes with ever more sophisticated + user / base conversions: + + * the user to base conversion goes from more sophisticated to less + sophisticated + * the base to user conversion goes from less sophisticated to more + sophisticated + + For example, see the relationship between :class:`BlobProperty`, + :class:`TextProperty` and :class:`StringProperty`. + + The validation API distinguishes between "lax" and "strict" user values. + The set of lax values is a superset of the set of strict values. The + ``_validate()`` method takes a lax value and if necessary converts it to + a strict value. For example, an integer (lax) can be converted to a + floating point (strict) value. 
This means that when setting the property + value, lax values are accepted, while when getting the property value, only + strict values will be returned. If no conversion is needed, ``_validate()`` + may return :data:`None`. If the argument is outside the set of accepted lax + values, ``_validate()`` should raise an exception, preferably + :exc:`TypeError` or :exc:`.BadValueError`. + + A class utilizing all three may resemble: + + .. code-block:: python + + class WidgetProperty(ndb.Property): + + def _validate(self, value): + # Lax user value to strict user value. + if not isinstance(value, Widget): + raise ndb.exceptions.BadValueError(value) + + def _to_base_type(self, value): + # (Strict) user value to base value. + if isinstance(value, Widget): + return value.to_internal() + + def _from_base_type(self, value): + # Base value to (strict) user value. + if not isinstance(value, _WidgetInternal): + return Widget(value) + + There are some things that ``_validate()``, ``_to_base_type()`` and + ``_from_base_type()`` do **not** need to handle: + + * :data:`None`: They will not be called with :data:`None` (and if they + return :data:`None`, this means that the value does not need conversion). + * Repeated values: The infrastructure takes care of calling + ``_from_base_type()`` or ``_to_base_type()`` for each list item in a + repeated value. + * Wrapping "base" values: The wrapping and unwrapping is taken care of by + the infrastructure that calls the composable APIs. + * Comparisons: The comparison operations call ``_to_base_type()`` on + their operand. + * Distinguishing between user and base values: the infrastructure + guarantees that ``_from_base_type()`` will be called with an + (unwrapped) base value, and that ``_to_base_type()`` will be called + with a user value. + * Returning the original value: if any of these return :data:`None`, the + original value is kept. (Returning a different value not equal to + :data:`None` will substitute the different value.) + + Additionally, :meth:`_prepare_for_put` can be used to integrate with + datastore save hooks used by :class:`Model` instances. + + .. automethod:: _prepare_for_put + + Args: + name (str): The name of the property. + indexed (bool): Indicates if the value should be indexed. + repeated (bool): Indicates if this property is repeated, i.e. contains + multiple values. + required (bool): Indicates if this property is required on the given + model type. + default (Any): The default value for this property. + choices (Iterable[Any]): A container of allowed values for this + property. + validator (Callable[[~google.cloud.ndb.model.Property, Any], bool]): A + validator to be used to check values. + verbose_name (str): A longer, user-friendly name for this property. + write_empty_list (bool): Indicates if an empty list should be written + to the datastore. + """ + + # Instance default fallbacks provided by class. + _code_name = None + _name = None + _indexed = True + _repeated = False + _required = False + _default = None + _choices = None + _validator = None + _verbose_name = None + _write_empty_list = False + # Non-public class attributes. + _FIND_METHODS_CACHE = {} + + @utils.positional(2) + def __init__( + self, + name=None, + indexed=None, + repeated=None, + required=None, + default=None, + choices=None, + validator=None, + verbose_name=None, + write_empty_list=None, + ): + # NOTE: These explicitly avoid setting the values so that the + # instances will fall back to the class on lookup.
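+ # For example (illustrative): ``Property(repeated=True)`` sets only + # ``_repeated`` on this instance; ``_indexed``, ``_required``, and the + # other attributes continue to resolve to the class-level defaults + # above.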
+ if name is not None: + self._name = self._verify_name(name) + if indexed is not None: + self._indexed = indexed + if repeated is not None: + self._repeated = repeated + if required is not None: + self._required = required + if default is not None: + self._default = default + self._verify_repeated() + if choices is not None: + self._choices = self._verify_choices(choices) + if validator is not None: + self._validator = self._verify_validator(validator) + if verbose_name is not None: + self._verbose_name = verbose_name + if write_empty_list is not None: + self._write_empty_list = write_empty_list + + @staticmethod + def _verify_name(name): + """Verify the name of the property. + + Args: + name (str): The name of the property. + + Returns: + str: The ``name`` passed in. + + Raises: + TypeError: If the ``name`` is not a string. + ValueError: If the name contains a ``.``. + """ + if not isinstance(name, str): + raise TypeError("Name {!r} is not a string".format(name)) + + if "." in name: + raise ValueError("Name {!r} cannot contain period characters".format(name)) + + return name + + def _verify_repeated(self): + """Checks if the repeated / required / default values are compatible. + + Raises: + ValueError: If ``repeated`` is :data:`True` but one of + ``required`` or ``default`` is set. + """ + if self._repeated and (self._required or self._default is not None): + raise ValueError("repeated is incompatible with required or default") + + @staticmethod + def _verify_choices(choices): + """Verify the choices for a property with a limited set of values. + + Args: + choices (Union[list, tuple, set, frozenset]): An iterable of + allowed values for the property. + + Returns: + frozenset: The ``choices`` cast to a frozen set. + + Raises: + TypeError: If ``choices`` is not one of the expected container + types. + """ + if not isinstance(choices, (list, tuple, set, frozenset)): + raise TypeError( + "choices must be a list, tuple or set; received {!r}".format(choices) + ) + return frozenset(choices) + + @staticmethod + def _verify_validator(validator): + """Verify the validator for a property. + + The validator will be called as follows: + + .. code-block:: python + + value = validator(prop, value) + + The ``validator`` should be idempotent, i.e. calling it a second time + should not further modify the value. So a validator that returns e.g. + ``value.lower()`` or ``value.strip()`` is fine, but one that returns + ``value + "$"`` is not. + + Args: + validator (Callable[[Property, Any], bool]): A callable that can + validate a property value. + + Returns: + Callable[[Property, Any], bool]: The ``validator``. + + Raises: + TypeError: If ``validator`` is not callable. This is determined by + checking if the attribute ``__call__`` is defined. + """ + # NOTE: Checking for ``__call__`` is done to match the original + # implementation. It's not clear why ``callable()`` was not used. + if getattr(validator, "__call__", None) is None: + raise TypeError( + "validator must be callable or None; received {!r}".format(validator) + ) + + return validator + + def _constructor_info(self): + """Helper for :meth:`__repr__`. + + Yields: + Tuple[str, bool]: Pairs of argument name and a boolean indicating + if that argument is a keyword. + """ + # inspect.signature not available in Python 2.7, so we use positional + # decorator combined with argspec instead.
+ argspec = getattr(self.__init__, "_argspec", _getfullargspec(self.__init__)) + positional = getattr(self.__init__, "_positional_args", 1) + for index, name in enumerate(argspec.args): + if name == "self": + continue + yield name, index >= positional + + def __repr__(self): + """Return a compact unambiguous string representation of a property. + + This cycles through all stored attributes and displays the ones that + differ from the default values. + """ + args = [] + cls = type(self) + for name, is_keyword in self._constructor_info(): + attr = "_{}".format(name) + instance_val = getattr(self, attr) + default_val = getattr(cls, attr) + + if instance_val is not default_val: + if isinstance(instance_val, type): + as_str = instance_val.__name__ + else: + as_str = repr(instance_val) + + if is_keyword: + as_str = "{}={}".format(name, as_str) + args.append(as_str) + + return "{}({})".format(cls.__name__, ", ".join(args)) + + def _datastore_type(self, value): + """Internal hook used by property filters. + + Sometimes the low-level query interface needs a specific data type + in order for the right filter to be constructed. See + :meth:`_comparison`. + + Args: + value (Any): The value to be converted to a low-level type. + + Returns: + Any: The passed-in ``value``, always. Subclasses may alter this + behavior. + """ + return value + + def _comparison(self, op, value): + """Internal helper for comparison operators. + + Args: + op (str): The comparison operator. One of ``=``, ``!=``, ``<``, + ``<=``, ``>``, ``>=`` or ``in``. + value (Any): The value to compare against. + + Returns: + FilterNode: A FilterNode instance representing the requested + comparison. + + Raises: + BadFilterError: If the current property is not indexed. + """ + # Import late to avoid circular imports. + from google.cloud.ndb import query + + if not self._indexed: + raise exceptions.BadFilterError( + "Cannot query for unindexed property {}".format(self._name) + ) + + if value is not None: + value = self._do_validate(value) + value = self._call_to_base_type(value) + value = self._datastore_type(value) + + return query.FilterNode(self._name, op, value) + + # Comparison operators on Property instances don't compare the + # properties; instead they return ``FilterNode`` instances that can be + # used in queries.
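+ # For example (an illustrative sketch, with a hypothetical ``Person`` + # model defining an ``age`` property): + # + # Person.age >= 18 + # + # evaluates to ``FilterNode("age", ">=", 18)``, which can then be + # passed to ``Person.query(...)``.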
+ + def __eq__(self, value): + """FilterNode: Represents the ``=`` comparison.""" + return self._comparison("=", value) + + def __ne__(self, value): + """FilterNode: Represents the ``!=`` comparison.""" + return self._comparison("!=", value) + + def __lt__(self, value): + """FilterNode: Represents the ``<`` comparison.""" + return self._comparison("<", value) + + def __le__(self, value): + """FilterNode: Represents the ``<=`` comparison.""" + return self._comparison("<=", value) + + def __gt__(self, value): + """FilterNode: Represents the ``>`` comparison.""" + return self._comparison(">", value) + + def __ge__(self, value): + """FilterNode: Represents the ``>=`` comparison.""" + return self._comparison(">=", value) + + def _validate_and_canonicalize_values(self, value): + if not self._indexed: + raise exceptions.BadFilterError( + "Cannot query for unindexed property {}".format(self._name) + ) + + if not isinstance(value, (list, tuple, set, frozenset)): + raise exceptions.BadArgumentError( + "For field {}, expected list, tuple or set, got {!r}".format( + self._name, value + ) + ) + + values = [] + for sub_value in value: + if sub_value is not None: + sub_value = self._do_validate(sub_value) + sub_value = self._call_to_base_type(sub_value) + sub_value = self._datastore_type(sub_value) + values.append(sub_value) + return values + + def _NOT_IN(self, value, server_op=False): + """.FilterNode: Represents the ``not_in`` filter.""" + # Import late to avoid circular imports. + from google.cloud.ndb import query + + values = self._validate_and_canonicalize_values(value) + return query.FilterNode(self._name, "not_in", values) + + def _IN(self, value, server_op=False): + """For the ``in`` comparison operator. + + The ``in`` operator cannot be overloaded in the way we want + to, so we define a method. For example: + + .. code-block:: python + + Employee.query(Employee.rank.IN([4, 5, 6])) + + Note that the method is called ``_IN()`` but may normally be invoked + as ``IN()``; ``_IN()`` is provided for the case that a + :class:`.StructuredProperty` refers to a model that has a property + named ``IN``. + + Args: + value (Iterable[Any]): The set of values that the property value + must be contained in. + + Returns: + Union[~google.cloud.ndb.query.DisjunctionNode, \ + ~google.cloud.ndb.query.FilterNode, \ + ~google.cloud.ndb.query.FalseNode]: A node corresponding + to the desired in filter. + + * If ``value`` is empty, this will return a :class:`.FalseNode` + * If ``len(value) == 1``, this will return a :class:`.FilterNode` + * Otherwise, this will return a :class:`.DisjunctionNode` + + Raises: + ~google.cloud.ndb.exceptions.BadFilterError: If the current + property is not indexed. + ~google.cloud.ndb.exceptions.BadArgumentError: If ``value`` is not + a basic container (:class:`list`, :class:`tuple`, :class:`set` + or :class:`frozenset`). + """ + # Import late to avoid circular imports. + from google.cloud.ndb import query + + values = self._validate_and_canonicalize_values(value) + return query.FilterNode(self._name, "in", values, server_op=server_op) + + IN = _IN + NOT_IN = _NOT_IN + + """Used to check if a property value is contained in a set of values. + + For example: + + .. code-block:: python + + Employee.query(Employee.rank.IN([4, 5, 6])) + """ + + def __neg__(self): + """Return a descending sort order on this property. + + For example: + + .. code-block:: python + + Employee.query().order(-Employee.rank) + """ + # Import late to avoid circular imports. 
+ from google.cloud.ndb import query + + return query.PropertyOrder(name=self._name, reverse=True) + + def __pos__(self): + """Return an ascending sort order on this property. + + Note that this is redundant but provided for consistency with + :meth:`__neg__`. For example, the following two are equivalent: + + .. code-block:: python + + Employee.query().order(+Employee.rank) + Employee.query().order(Employee.rank) + """ + # Import late to avoid circular imports. + from google.cloud.ndb import query + + return query.PropertyOrder(name=self._name, reverse=False) + + def _do_validate(self, value): + """Call all validations on the value. + + This transforms the ``value`` via: + + * Calling the derived ``_validate()`` method(s) (on subclasses that + don't define ``_to_base_type()``), + * Calling the custom validator function + + After transforming, it checks if the transformed value is in + ``choices`` (if defined). + + It's possible that one of the ``_validate()`` methods will raise + an exception. + + If ``value`` is a base-value, this will do nothing and return it. + + .. note:: + + This does not call all composable ``_validate()`` methods. + It only calls ``_validate()`` methods up to the + first class in the hierarchy that defines a ``_to_base_type()`` + method, when the MRO is traversed looking for ``_validate()`` and + ``_to_base_type()`` methods. + + .. note:: + + For a repeated property this method should be called + for each value in the list, not for the list as a whole. + + Args: + value (Any): The value to be converted / validated. + + Returns: + Any: The transformed ``value``, possibly modified in an idempotent + way. + """ + if self._validator is not None: + new_value = self._validator(self, value) + if new_value is not None: + value = new_value + + if isinstance(value, _BaseValue): + return value + + value = self._call_shallow_validation(value) + + if self._choices is not None: + if value not in self._choices: + raise exceptions.BadValueError( + "Value {!r} for property {} is not an allowed " + "choice".format(value, self._name) + ) + + return value + + def _fix_up(self, cls, code_name): + """Internal helper called to tell the property its name. + + This is called by :meth:`_fix_up_properties`, which is called by + :class:`MetaModel` when finishing the construction of a :class:`Model` + subclass. The name passed in is the name of the class attribute to + which the current property is assigned (a.k.a. the code name). Note + that this means that each property instance must be assigned to (at + most) one class attribute. E.g. to declare three strings, you must + create three :class:`StringProperty` instances: + + .. code-block:: python + + class MyModel(ndb.Model): + foo = ndb.StringProperty() + bar = ndb.StringProperty() + baz = ndb.StringProperty() + + you cannot write: + + .. code-block:: python + + class MyModel(ndb.Model): + foo = bar = baz = ndb.StringProperty() + + Args: + cls (type): The class that the property is stored on. This argument + is unused by this method, but may be used by subclasses. + code_name (str): The name (on the class) that refers to this + property. + """ + self._code_name = code_name + if self._name is None: + self._name = code_name + + def _store_value(self, entity, value): + """Store a value in an entity for this property. + + This assumes validation has already taken place. For a repeated + property the value should be a list. + + Args: + entity (Model): An entity to set a value on. + value (Any): The value to be stored for this property.
+ """ + entity._values[self._name] = value + + def _set_value(self, entity, value): + """Set a value in an entity for a property. + + This performs validation first. For a repeated property the value + should be a list (or similar container). + + Args: + entity (Model): An entity to set a value on. + value (Any): The value to be stored for this property. + + Raises: + ReadonlyPropertyError: If the ``entity`` is the result of a + projection query. + exceptions.BadValueError: If the current property is repeated but the + ``value`` is not a basic container (:class:`list`, + :class:`tuple`, :class:`set` or :class:`frozenset`). + """ + if entity._projection: + raise ReadonlyPropertyError( + "You cannot set property values of a projection entity" + ) + + if self._repeated: + if not isinstance(value, (list, tuple, set, frozenset)): + raise exceptions.BadValueError( + "In field {}, expected list or tuple, got {!r}".format( + self._name, value + ) + ) + value = [self._do_validate(v) for v in value] + else: + if value is not None: + value = self._do_validate(value) + + self._store_value(entity, value) + + def _has_value(self, entity, unused_rest=None): + """Determine if the entity has a value for this property. + + Args: + entity (Model): An entity to check if the current property has + a value set. + unused_rest (None): An always unused keyword. + """ + return self._name in entity._values + + def _retrieve_value(self, entity, default=None): + """Retrieve the value for this property from an entity. + + This returns :data:`None` if no value is set, or the ``default`` + argument if given. For a repeated property this returns a list if a + value is set, otherwise :data:`None`. No additional transformations + are applied. + + Args: + entity (Model): An entity to get a value from. + default (Optional[Any]): The default value to use as fallback. + """ + return entity._values.get(self._name, default) + + def _get_user_value(self, entity): + """Return the user value for this property of the given entity. + + This implies removing the :class:`_BaseValue` wrapper if present, and + if it is, calling all ``_from_base_type()`` methods, in the reverse + method resolution order of the property's class. It also handles + default values and repeated properties. + + Args: + entity (Model): An entity to get a value from. + + Returns: + Any: The original value (if not :class:`_BaseValue`) or the wrapped + value converted from the base type. + """ + return self._apply_to_values(entity, self._opt_call_from_base_type) + + def _get_base_value(self, entity): + """Return the base value for this property of the given entity. + + This implies calling all ``_to_base_type()`` methods, in the method + resolution order of the property's class, and adding a + :class:`_BaseValue` wrapper, if one is not already present. (If one + is present, no work is done.) It also handles default values and + repeated properties. + + Args: + entity (Model): An entity to get a value from. + + Returns: + Union[_BaseValue, List[_BaseValue]]: The original value + (if :class:`_BaseValue`) or the value converted to the base type + and wrapped. + """ + return self._apply_to_values(entity, self._opt_call_to_base_type) + + def _get_base_value_unwrapped_as_list(self, entity): + """Like _get_base_value(), but always returns a list. + + Args: + entity (Model): An entity to get a value from. + + Returns: + List[Any]: The unwrapped base values. 
For an unrepeated + property, if the value is missing or :data:`None`, returns + ``[None]``; for a repeated property, if the original value is + missing or :data:`None` or empty, returns + ``[]``. + """ + wrapped = self._get_base_value(entity) + if self._repeated: + return [w.b_val for w in wrapped] + else: + if wrapped is None: + return [None] + return [wrapped.b_val] + + def _opt_call_from_base_type(self, value): + """Call ``_from_base_type()`` if necessary. + + If ``value`` is a :class:`_BaseValue`, unwrap it and call all + :meth:`_from_base_type` methods. Otherwise, return the value + unchanged. + + Args: + value (Any): The value to invoke :meth:`_call_from_base_type` + for. + + Returns: + Any: The original value (if not :class:`_BaseValue`) or the value + converted from the base type. + """ + if isinstance(value, _BaseValue): + value = self._call_from_base_type(value.b_val) + return value + + def _value_to_repr(self, value): + """Turn a value (base or not) into its repr(). + + This exists so that property classes can override it separately. + + This manually applies ``_from_base_type()`` so as not to have a side + effect on what's contained in the entity. Printing a value should not + change it. + + Args: + value (Any): The value to convert to a pretty-print ``repr``. + + Returns: + str: The ``repr`` of the "true" value. + """ + val = self._opt_call_from_base_type(value) + return repr(val) + + def _opt_call_to_base_type(self, value): + """Call ``_to_base_type()`` if necessary. + + If ``value`` is a :class:`_BaseValue`, return it unchanged. + Otherwise, call all ``_validate()`` and ``_to_base_type()`` methods + and wrap it in a :class:`_BaseValue`. + + Args: + value (Any): The value to invoke :meth:`_call_to_base_type` + for. + + Returns: + _BaseValue: The original value (if :class:`_BaseValue`) or the + value converted to the base type and wrapped. + """ + if not isinstance(value, _BaseValue): + value = _BaseValue(self._call_to_base_type(value)) + return value + + def _call_from_base_type(self, value): + """Call all ``_from_base_type()`` methods on the value. + + This calls the methods in the reverse method resolution order of + the property's class. + + Args: + value (Any): The value to be converted. + + Returns: + Any: The transformed ``value``. + """ + methods = self._find_methods("_from_base_type", reverse=True) + call = self._apply_list(methods) + return call(value) + + def _call_to_base_type(self, value): + """Call all ``_validate()`` and ``_to_base_type()`` methods on value. + + This calls the methods in the method resolution order of the + property's class. For example, given the hierarchy + + .. code-block:: python + + class A(Property): + def _validate(self, value): + ... + def _to_base_type(self, value): + ... + + class B(A): + def _validate(self, value): + ... + def _to_base_type(self, value): + ... + + class C(B): + def _validate(self, value): + ... + + the full list of methods (in order) is: + + * ``C._validate()`` + * ``B._validate()`` + * ``B._to_base_type()`` + * ``A._validate()`` + * ``A._to_base_type()`` + + Args: + value (Any): The value to be converted / validated. + + Returns: + Any: The transformed ``value``. + """ + methods = self._find_methods("_validate", "_to_base_type") + call = self._apply_list(methods) + value = call(value) + + # Legacy NDB, because it didn't delegate to Datastore for serializing + # entities, would directly write a Key protocol buffer for a key.
We, + # however, need to transform NDB keys to Datastore keys before + # delegating to Datastore to generate protocol buffers. You might be + # tempted to do this in KeyProperty._to_base_type, and that works great + # for properties of KeyProperty type. If, however, you're computing a + # key in a ComputedProperty, ComputedProperty doesn't know to call + # KeyProperty's base type. (Probably ComputedProperty should take + # another property type as a constructor argument for this purpose, + # but that wasn't part of the original design and adding it introduces + # backwards compatibility issues.) See: Issue #284 + if isinstance(value, key_module.Key): + value = value._key # Datastore key + + return value + + def _call_shallow_validation(self, value): + """Call the "initial" set of ``_validate()`` methods. + + This is similar to :meth:`_call_to_base_type` except it only calls + those ``_validate()`` methods that can be called without needing to + call ``_to_base_type()``. + + An example: suppose the class hierarchy is + + .. code-block:: python + + class A(Property): + def _validate(self, value): + ... + def _to_base_type(self, value): + ... + + class B(A): + def _validate(self, value): + ... + def _to_base_type(self, value): + ... + + class C(B): + def _validate(self, value): + ... + + The full list of methods (in order) called by + :meth:`_call_to_base_type` is: + + * ``C._validate()`` + * ``B._validate()`` + * ``B._to_base_type()`` + * ``A._validate()`` + * ``A._to_base_type()`` + + whereas the full list of methods (in order) called here stops once + a ``_to_base_type()`` method is encountered: + + * ``C._validate()`` + * ``B._validate()`` + + Args: + value (Any): The value to be converted / validated. + + Returns: + Any: The transformed ``value``. + """ + methods = [] + for method in self._find_methods("_validate", "_to_base_type"): + # Stop if ``_to_base_type()`` is encountered. + if method.__name__ != "_validate": + break + methods.append(method) + + call = self._apply_list(methods) + return call(value) + + @classmethod + def _find_methods(cls, *names, **kwargs): + """Compute a list of composable methods. + + Because this is a common operation and the class hierarchy is + static, the outcome is cached (assuming that for a particular list + of names the reversed flag is either always on, or always off). + + Args: + names (Tuple[str, ...]): One or more method names to look up on + the current class or base classes. + reverse (bool): Optional flag, default False; if True, the list is + reversed. + + Returns: + List[Callable]: Class method objects. + """ + reverse = kwargs.get("reverse", False) + # Get cache on current class / set cache if it doesn't exist. + # Using __qualname__ was better for getting a qualified name, but it's + # not available in Python 2.7. + key = "{}.{}".format(cls.__module__, cls.__name__) + cache = cls._FIND_METHODS_CACHE.setdefault(key, {}) + hit = cache.get(names) + if hit is not None: + if reverse: + return list(reversed(hit)) + else: + return hit + + methods = [] + for klass in cls.__mro__: + for name in names: + method = klass.__dict__.get(name) + if method is not None: + methods.append(method) + + cache[names] = methods + if reverse: + return list(reversed(methods)) + else: + return methods + + def _apply_list(self, methods): + """Chain together a list of callables for transforming a value. + + .. note:: + + Each callable in ``methods`` is an unbound instance method, e.g. + accessed via ``Property.foo`` rather than ``instance.foo``. 
+ Therefore, calling these methods will require ``self`` as the + first argument. + + If one of the methods returns :data:`None`, the previous value is kept; + otherwise the returned value replaces it. + + Exceptions thrown by a method in ``methods`` are not caught, so it + is up to the caller to catch them. + + Args: + methods (Iterable[Callable[[Any], Any]]): An iterable of methods + to apply to a value. + + Returns: + Callable[[Any], Any]: A callable that takes a single value and + applies each method in ``methods`` to it. + """ + + def call(value): + for method in methods: + new_value = method(self, value) + if new_value is not None: + value = new_value + return value + + return call + + def _apply_to_values(self, entity, function): + """Apply a function to the property value / values of a given entity. + + This retrieves the property value, applies the function, and then + stores the value back. For a repeated property, the function is + applied separately to each of the values in the list. The + resulting value or list of values is both stored back in the + entity and returned from this method. + + Args: + entity (Model): An entity to get a value from. + function (Callable[[Any], Any]): A transformation to apply to + the value. + + Returns: + Any: The transformed value stored on the entity for this property. + """ + value = self._retrieve_value(entity, self._default) + if self._repeated: + if value is None: + value = [] + self._store_value(entity, value) + else: + # NOTE: This assumes, but does not check, that ``value`` is + # iterable. This relies on ``_set_value`` having checked + # and converted to a ``list`` for a repeated property. + value[:] = map(function, value) + else: + if value is not None: + new_value = function(value) + if new_value is not None and new_value is not value: + self._store_value(entity, new_value) + value = new_value + + return value + + def _get_value(self, entity): + """Get the value for this property from an entity. + + For a repeated property this initializes the value to an empty + list if it is not set. + + Args: + entity (Model): An entity to get a value from. + + Returns: + Any: The user value stored for the current property. + + Raises: + UnprojectedPropertyError: If the ``entity`` is the result of a + projection query and the current property is not one of the + projected properties. + """ + if entity._projection: + if self._name not in entity._projection: + raise UnprojectedPropertyError( + "Property {} is not in the projection".format(self._name) + ) + + return self._get_user_value(entity) + + def _delete_value(self, entity): + """Delete the value for this property from an entity. + + .. note:: + + If no value exists this is a no-op; deleted values will not be + serialized but requesting their value will return :data:`None` (or + an empty list in the case of a repeated property). + + Args: + entity (Model): An entity to get a value from. + """ + if self._name in entity._values: + del entity._values[self._name] + + def _is_initialized(self, entity): + """Ask if the entity has a value for this property. + + This returns :data:`False` if a value is stored but the stored value + is :data:`None`. + + Args: + entity (Model): An entity to get a value from. + """ + return not self._required or ( + (self._has_value(entity) or self._default is not None) + and self._get_value(entity) is not None + ) + + def __get__(self, entity, unused_cls=None): + """Descriptor protocol: get the value from the entity. + + Args: + entity (Model): An entity to get a value from.
+ unused_cls (type): The class that owns this instance. + """ + if entity is None: + # Handle the case where ``__get__`` is called on the class + # rather than an instance. + return self + return self._get_value(entity) + + def __set__(self, entity, value): + """Descriptor protocol: set the value on the entity. + + Args: + entity (Model): An entity to set a value on. + value (Any): The value to set. + """ + self._set_value(entity, value) + + def __delete__(self, entity): + """Descriptor protocol: delete the value from the entity. + + Args: + entity (Model): An entity to delete a value from. + """ + self._delete_value(entity) + + def _serialize(self, entity, pb, prefix="", parent_repeated=False, projection=None): + """Serialize this property to a protocol buffer. + + Some subclasses may override this method. + + Args: + entity (Model): The entity that owns this property. + pb (google.cloud.datastore_v1.proto.entity_pb2.Entity): An existing + entity protobuf instance that we'll add a value to. + prefix (Optional[str]): Name prefix used for + :class:`StructuredProperty` (if present, must end in ``.``). + parent_repeated (Optional[bool]): Indicates if the parent (or an + earlier ancestor) is a repeated property. + projection (Optional[Union[list, tuple]]): An iterable of strings + representing the projection for the model instance, or + :data:`None` if the instance is not a projection. + + Raises: + NotImplementedError: Always. No longer implemented. + """ + raise exceptions.NoLongerImplementedError() + + def _deserialize(self, entity, p, unused_depth=1): + """Deserialize this property from a protocol buffer. + + Raises: + NotImplementedError: Always. This method is deprecated. + """ + raise exceptions.NoLongerImplementedError() + + def _legacy_deserialize(self, entity, p, unused_depth=1): + """Internal helper to deserialize this property from a protocol buffer. + Ported from legacy NDB, used for decoding pickle properties. + This is an older style GAE protocol buffer deserializer and is not + used to deserialize the modern Google Cloud Datastore protocol buffer. + + Subclasses may override this method. + + Args: + entity: The entity, a Model (subclass) instance. + p: A Property Message object (a protocol buffer). + depth: Optional nesting depth, default 1 (unused here, but used + by some subclasses that override this method). + """ + + if p.meaning() == _legacy_entity_pb.Property.EMPTY_LIST: + self._store_value(entity, []) + return + + val = self._legacy_db_get_value(p.value(), p) + if val is not None: + val = _BaseValue(val) + + # TODO(from legacy-datastore port): May never be suitable. + # Replace the remainder of the function with the following commented + # out code once it's feasible to make breaking changes such as not calling + # _store_value(). + + # if self._repeated: + # entity._values.setdefault(self._name, []).append(val) + # else: + # entity._values[self._name] = val + + if self._repeated: + if self._has_value(entity): + value = self._retrieve_value(entity) + assert isinstance(value, list), repr(value) + value.append(val) + else: + # We promote single values to lists if we are a list property + value = [val] + else: + value = val + self._store_value(entity, value) + + def _db_set_value(self, v, unused_p, value): + """Helper for :meth:`_serialize`. + + Raises: + NotImplementedError: Always. No longer implemented. + """ + raise exceptions.NoLongerImplementedError() + + def _db_get_value(self, v, unused_p): + """Helper for :meth:`_deserialize`.
+
+ Raises:
+ NotImplementedError: Always. This method is deprecated.
+ """
+ raise exceptions.NoLongerImplementedError()
+
+ @staticmethod
+ def _legacy_db_get_value(v, p):
+ # Ported from https://github.com/GoogleCloudPlatform/datastore-ndb-python/blob/cf4cab3f1f69cd04e1a9229871be466b53729f3f/ndb/model.py#L2647
+ entity_pb = _legacy_entity_pb
+ # A custom 'meaning' for compressed properties.
+ _MEANING_URI_COMPRESSED = "ZLIB"
+ # The Epoch (a zero POSIX timestamp).
+ _EPOCH = datetime.datetime.utcfromtimestamp(0)
+ # This is awkward but there seems to be no faster way to inspect
+ # what union member is present. datastore_types.FromPropertyPb(),
+ # the undisputed authority, has the same series of if-elif blocks.
+ # (We don't even want to think about multiple members... :-)
+ if v.has_stringvalue():
+ sval = v.stringvalue()
+ meaning = p.meaning()
+ if meaning == entity_pb.Property.BLOBKEY:
+ sval = BlobKey(sval)
+ elif meaning == entity_pb.Property.BLOB:
+ if p.meaning_uri() == _MEANING_URI_COMPRESSED:
+ sval = _CompressedValue(sval)
+ elif meaning == entity_pb.Property.ENTITY_PROTO:
+ # NOTE: This is only used for uncompressed LocalStructuredProperties.
+ pb = entity_pb.EntityProto()
+ pb.MergePartialFromString(sval)
+ modelclass = Expando
+ if pb.key().path.element_size():
+ kind = pb.key().path.element[-1].type
+ modelclass = Model._kind_map.get(kind, modelclass)
+ sval = modelclass._from_pb(pb)
+ elif meaning != entity_pb.Property.BYTESTRING:
+ try:
+ sval.decode("ascii")
+ # If this passes, don't return unicode.
+ except UnicodeDecodeError:
+ try:
+ sval = str(sval.decode("utf-8"))
+ except UnicodeDecodeError:
+ pass
+ return sval
+ elif v.has_int64value():
+ ival = v.int64value()
+ if p.meaning() == entity_pb.Property.GD_WHEN:
+ return _EPOCH + datetime.timedelta(microseconds=ival)
+ return ival
+ elif v.has_booleanvalue():
+ # The booleanvalue field is an int32, so booleanvalue() returns
+ # an int, hence the conversion.
+ return bool(v.booleanvalue())
+ elif v.has_doublevalue():
+ return v.doublevalue()
+ elif v.has_referencevalue():
+ rv = v.referencevalue()
+ app = rv.app()
+ namespace = rv.name_space()
+ pairs = [
+ (elem.type(), elem.id() or elem.name())
+ for elem in rv.pathelement_list()
+ ]
+ return Key(pairs=pairs, app=app, namespace=namespace)
+ elif v.has_pointvalue():
+ pv = v.pointvalue()
+ return GeoPt(pv.x(), pv.y())
+ elif v.has_uservalue():
+ return _unpack_user(v)
+ else:
+ # A missing value implies null.
+ return None
+
+ def _prepare_for_put(self, entity):
+ """Allow this property to define a pre-put hook.
+
+ This base class implementation does nothing, but subclasses may
+ provide hooks.
+
+ Args:
+ entity (Model): An entity with values.
+ """
+ pass
+
+ def _check_property(self, rest=None, require_indexed=True):
+ """Check this property for specific requirements.
+
+ Called by ``Model._check_properties()``.
+
+ Args:
+ rest: Optional subproperty to check, of the form
+ ``name1.name2...nameN``.
+ require_indexed (bool): Indicates if the current property must
+ be indexed.
+
+ Raises:
+ InvalidPropertyError: If ``require_indexed`` is :data:`True`
+ but the current property is not indexed.
+ InvalidPropertyError: If a subproperty is specified via ``rest``
+ (:class:`StructuredProperty` overrides this method to handle
+ subproperties).
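+
+ For example (an illustrative sketch; the ``Article`` model and its
+ property are hypothetical), projecting an unindexed property trips
+ the first check:
+
+ .. code-block:: python
+
+ class Article(ndb.Model):
+ summary = ndb.TextProperty() # TextProperty is never indexed
+
+ # Expected to raise InvalidPropertyError ("Property is unindexed"),
+ # since projection queries may only use indexed properties.
+ Article.query(projection=[Article.summary]).fetch()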
+ """ + if require_indexed and not self._indexed: + raise InvalidPropertyError("Property is unindexed: {}".format(self._name)) + + if rest: + raise InvalidPropertyError( + "Referencing subproperty {}.{} but {} is not a structured " + "property".format(self._name, rest, self._name) + ) + + def _get_for_dict(self, entity): + """Retrieve the value like ``_get_value()``. + + This is intended to be processed for ``_to_dict()``. + + Property subclasses can override this if they want the dictionary + returned by ``entity._to_dict()`` to contain a different value. The + main use case is allowing :class:`StructuredProperty` and + :class:`LocalStructuredProperty` to allow the default ``_get_value()`` + behavior. + + * If you override ``_get_for_dict()`` to return a different type, you + must override ``_validate()`` to accept values of that type and + convert them back to the original type. + + * If you override ``_get_for_dict()``, you must handle repeated values + and :data:`None` correctly. However, ``_validate()`` does not need to + handle these. + + Args: + entity (Model): An entity to get a value from. + + Returns: + Any: The user value stored for the current property. + """ + return self._get_value(entity) + + def _to_datastore(self, entity, data, prefix="", repeated=False): + """Helper to convert property to Datastore serializable data. + + Called to help assemble a Datastore entity prior to serialization for + storage. Subclasses (like StructuredProperty) may need to override the + default behavior. + + Args: + entity (entity.Entity): The NDB entity to convert. + data (dict): The data that will eventually be used to construct the + Datastore entity. This method works by updating ``data``. + prefix (str): Optional name prefix used for StructuredProperty (if + present, must end in ".". + repeated (bool): `True` if values should be repeated because an + ancestor node is repeated property. + + Return: + Sequence[str]: Any keys that were set on ``data`` by this method + call. + """ + value = self._get_base_value_unwrapped_as_list(entity) + if not self._repeated: + value = value[0] + + key = prefix + self._name + if repeated: + data.setdefault(key, []).append(value) + else: + data[key] = value + + if not self._indexed: + data["_exclude_from_indexes"].append(key) + + return (key,) + + def _from_datastore(self, ds_entity, value): + """Helper to convert property value from Datastore serializable data. + + Called to modify the value of a property during deserialization from + storage. Subclasses (like BlobProperty) may need to override the + default behavior, which is simply to return the received value without + modification. + + Args: + ds_entity (~google.cloud.datastore.Entity): The Datastore entity to + convert. + value (_BaseValue): The stored value of this property for the + entity being deserialized. + + Return: + value [Any]: The transformed value. + """ + return value + + +def _validate_key(value, entity=None): + """Validate a key. + + Args: + value (~google.cloud.ndb.key.Key): The key to be validated. + entity (Optional[Model]): The entity that the key is being validated + for. + + Returns: + ~google.cloud.ndb.key.Key: The passed in ``value``. + + Raises: + exceptions.BadValueError: If ``value`` is not a :class:`~google.cloud.ndb.key.Key`. + KindError: If ``entity`` is specified, but the kind of the entity + doesn't match the kind of ``value``. 
+ """ + if not isinstance(value, Key): + raise exceptions.BadValueError("Expected Key, got {!r}".format(value)) + + if entity and type(entity) not in (Model, Expando): + if value.kind() != entity._get_kind(): + raise KindError( + "Expected Key kind to be {}; received " + "{}".format(entity._get_kind(), value.kind()) + ) + + return value + + +class ModelKey(Property): + """Special property to store a special "key" for a :class:`Model`. + + This is intended to be used as a pseudo-:class:`Property` on each + :class:`Model` subclass. It is **not** intended for other usage in + application code. + + It allows key-only queries to be done for a given kind. + + .. automethod:: _validate + """ + + def __init__(self): + super(ModelKey, self).__init__() + self._name = "__key__" + + def _comparison(self, op, value): + """Internal helper for comparison operators. + + This uses the base implementation in :class:`Property`, but doesn't + allow comparison to :data:`None`. + + Args: + op (str): The comparison operator. One of ``=``, ``!=``, ``<``, + ``<=``, ``>``, ``>=`` or ``in``. + value (Any): The value to compare against. + + Returns: + FilterNode: A FilterNode instance representing the requested + comparison. + + Raises: + exceptions.BadValueError: If ``value`` is :data:`None`. + """ + if value is not None: + return super(ModelKey, self)._comparison(op, value) + + raise exceptions.BadValueError("__key__ filter query can't be compared to None") + + def _validate(self, value): + """Validate a ``value`` before setting it. + + Args: + value (~google.cloud.ndb.key.Key): The value to check. + + Returns: + ~google.cloud.ndb.key.Key: The passed-in ``value``. + """ + return _validate_key(value) + + @staticmethod + def _set_value(entity, value): + """Set the entity key on an entity. + + Args: + entity (Model): An entity to set the entity key on. + value (~google.cloud.ndb.key.Key): The key to be set on the entity. + """ + if value is not None: + value = _validate_key(value, entity=entity) + value = entity._validate_key(value) + + entity._entity_key = value + + @staticmethod + def _get_value(entity): + """Get the entity key from an entity. + + Args: + entity (Model): An entity to get the entity key from. + + Returns: + ~google.cloud.ndb.key.Key: The entity key stored on ``entity``. + """ + return entity._entity_key + + @staticmethod + def _delete_value(entity): + """Remove / disassociate the entity key from an entity. + + Args: + entity (Model): An entity to remove the entity key from. + """ + entity._entity_key = None + + +class BooleanProperty(Property): + """A property that contains values of type bool. + + .. automethod:: _validate + """ + + def _validate(self, value): + """Validate a ``value`` before setting it. + + Args: + value (bool): The value to check. + + Returns: + bool: The passed-in ``value``. + + Raises: + exceptions.BadValueError: If ``value`` is not a :class:`bool`. + """ + if not isinstance(value, bool): + raise exceptions.BadValueError( + "In field {}, expected bool, got {!r}".format(self._name, value) + ) + return value + + def _from_base_type(self, value): + """Convert a value from the "base" value type for this property. + + Args: + value (Union[int, bool]): The value to be converted. + + Returns: + Optional[bool]: The converted value. If the current property is + an ``int`` value, this will convert to a ``bool``. + """ + # When loading a LocalStructuredProperty from a database written with the legacy + # GAE NDB, the boolean properties will have int values. 
+ # See: Issue #623 (https://github.com/googleapis/python-ndb/issues/623) + if type(value) is int: + return bool(value) + + +class IntegerProperty(Property): + """A property that contains values of type integer. + + .. note:: + + If a value is a :class:`bool`, it will be coerced to ``0`` (for + :data:`False`) or ``1`` (for :data:`True`). + + .. automethod:: _validate + """ + + def _validate(self, value): + """Validate a ``value`` before setting it. + + Args: + value (Union[int, bool]): The value to check. + + Returns: + int: The passed-in ``value``. + + Raises: + exceptions.BadValueError: If ``value`` is not an :class:`int` or convertible + to one. + """ + if not isinstance(value, int): + raise exceptions.BadValueError( + "In field {}, expected integer, got {!r}".format(self._name, value) + ) + return int(value) + + +class FloatProperty(Property): + """A property that contains values of type float. + + .. note:: + + If a value is a :class:`bool` or :class:`int`, it will be + coerced to a floating point value. + + .. automethod:: _validate + """ + + def _validate(self, value): + """Validate a ``value`` before setting it. + + Args: + value (Union[float, int, bool]): The value to check. + + Returns: + float: The passed-in ``value``, possibly converted to a + :class:`float`. + + Raises: + exceptions.BadValueError: If ``value`` is not a :class:`float` or convertible + to one. + """ + if not isinstance(value, (float, int)): + raise exceptions.BadValueError( + "In field {}, expected float, got {!r}".format(self._name, value) + ) + return float(value) + + +class _CompressedValue(bytes): + """A marker object wrapping compressed values. + + Args: + z_val (bytes): A return value of ``zlib.compress``. + """ + + def __init__(self, z_val): + self.z_val = z_val + + def __repr__(self): + return "_CompressedValue({!r})".format(self.z_val) + + def __eq__(self, other): + """Compare two compressed values.""" + if not isinstance(other, _CompressedValue): + return NotImplemented + + return self.z_val == other.z_val + + def __hash__(self): + raise TypeError("_CompressedValue is not immutable") + + +class BlobProperty(Property): + """A property that contains values that are byte strings. + + .. note:: + + Unlike most property types, a :class:`BlobProperty` is **not** + indexed by default. + + .. automethod:: _to_base_type + .. automethod:: _from_base_type + .. automethod:: _validate + + Args: + name (str): The name of the property. + compressed (bool): Indicates if the value should be compressed (via + ``zlib``). + indexed (bool): Indicates if the value should be indexed. + repeated (bool): Indicates if this property is repeated, i.e. contains + multiple values. + required (bool): Indicates if this property is required on the given + model type. + default (bytes): The default value for this property. + choices (Iterable[bytes]): A container of allowed values for this + property. + validator (Callable[[~google.cloud.ndb.model.Property, Any], bool]): A + validator to be used to check values. + verbose_name (str): A longer, user-friendly name for this property. + write_empty_list (bool): Indicates if an empty list should be written + to the datastore. + + Raises: + NotImplementedError: If the property is both compressed and indexed. 
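+
+ For example (an illustrative sketch; the ``Attachment`` model is
+ hypothetical):
+
+ .. code-block:: python
+
+ class Attachment(ndb.Model):
+ # Stored zlib-compressed; a compressed blob cannot be indexed.
+ data = ndb.BlobProperty(compressed=True)
+
+ attachment = Attachment(data=b"\x89PNG\r\n...")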
+ """ + + _indexed = False + _compressed = False + + @utils.positional(2) + def __init__( + self, + name=None, + compressed=None, + indexed=None, + repeated=None, + required=None, + default=None, + choices=None, + validator=None, + verbose_name=None, + write_empty_list=None, + ): + super(BlobProperty, self).__init__( + name=name, + indexed=indexed, + repeated=repeated, + required=required, + default=default, + choices=choices, + validator=validator, + verbose_name=verbose_name, + write_empty_list=write_empty_list, + ) + if compressed is not None: + self._compressed = compressed + if self._compressed and self._indexed: + raise NotImplementedError( + "BlobProperty {} cannot be compressed and " + "indexed at the same time.".format(self._name) + ) + + def _value_to_repr(self, value): + """Turn the value into a user friendly representation. + + .. note:: + + This will truncate the value based on the "visual" length, e.g. + if it contains many ``\\xXX`` or ``\\uUUUU`` sequences, those + will count against the length as more than one character. + + Args: + value (Any): The value to convert to a pretty-print ``repr``. + + Returns: + str: The ``repr`` of the "true" value. + """ + long_repr = super(BlobProperty, self)._value_to_repr(value) + if len(long_repr) > _MAX_STRING_LENGTH + 4: + # Truncate, assuming the final character is the closing quote. + long_repr = long_repr[:_MAX_STRING_LENGTH] + "..." + long_repr[-1] + return long_repr + + def _validate(self, value): + """Validate a ``value`` before setting it. + + Args: + value (bytes): The value to check. + + Raises: + exceptions.BadValueError: If ``value`` is not a :class:`bytes`. + exceptions.BadValueError: If the current property is indexed but the value + exceeds the maximum length (1500 bytes). + """ + if not isinstance(value, bytes): + raise exceptions.BadValueError( + "In field {}, expected bytes, got {!r}".format(self._name, value) + ) + + if self._indexed and len(value) > _MAX_STRING_LENGTH: + raise exceptions.BadValueError( + "Indexed value {} must be at most {:d} " + "bytes".format(self._name, _MAX_STRING_LENGTH) + ) + + def _to_base_type(self, value): + """Convert a value to the "base" value type for this property. + + Args: + value (bytes): The value to be converted. + + Returns: + Optional[bytes]: The converted value. If the current property is + compressed, this will return a wrapped version of the compressed + value. Otherwise, it will return :data:`None` to indicate that + the value didn't need to be converted. + """ + if self._compressed: + return _CompressedValue(zlib.compress(value)) + + def _from_base_type(self, value): + """Convert a value from the "base" value type for this property. + + Args: + value (bytes): The value to be converted. + + Returns: + Optional[bytes]: The converted value. If the current property is + a (wrapped) compressed value, this will unwrap the value and return + the decompressed form. Otherwise, it will return :data:`None` to + indicate that the value didn't need to be unwrapped and + decompressed. + """ + # First, check for legacy compressed LocalStructuredProperty values. 
+ # See https://github.com/googleapis/python-ndb/issues/359 + if self._compressed and isinstance(value, ds_entity_module.Entity): + return + + if self._compressed and not isinstance(value, _CompressedValue): + if not value.startswith(_ZLIB_COMPRESSION_MARKERS): + return value + value = _CompressedValue(value) + + if isinstance(value, _CompressedValue): + return zlib.decompress(value.z_val) + + def _to_datastore(self, entity, data, prefix="", repeated=False): + """Override of :method:`Property._to_datastore`. + + If this is a compressed property, we need to set the backwards- + compatible `_meanings` field, so that it can be properly read later. + """ + keys = super(BlobProperty, self)._to_datastore( + entity, data, prefix=prefix, repeated=repeated + ) + if self._compressed: + key = prefix + self._name + value = data[key] + if isinstance(value, _CompressedValue): + value = value.z_val + data[key] = value + + if self._repeated: + compressed_value = [] + for rval in value: + if rval and not rval.startswith(_ZLIB_COMPRESSION_MARKERS): + rval = zlib.compress(rval) + compressed_value.append(rval) + value = compressed_value + data[key] = value + if not self._repeated: + values = [ + zlib.compress(v) + if v and not v.startswith(_ZLIB_COMPRESSION_MARKERS) + else v + for v in (value if repeated else [value]) + ] + value = values if repeated else values[0] + data[key] = value + + if value and not repeated: + data.setdefault("_meanings", {})[key] = ( + _MEANING_COMPRESSED, + value, + ) + return keys + + def _from_datastore(self, ds_entity, value): + """Override of :method:`Property._from_datastore`. + + Need to check the ds_entity for a compressed meaning that would + indicate we are getting a compressed value. + """ + if self._name in ds_entity._meanings and not self._compressed: + root_meaning = ds_entity._meanings[self._name][0] + sub_meanings = None + # meaning may be a tuple. Attempt unwrap + if isinstance(root_meaning, tuple): + root_meaning, sub_meanings = root_meaning + # decompress values if needed + if root_meaning == _MEANING_COMPRESSED and not self._repeated: + value.b_val = zlib.decompress(value.b_val) + elif root_meaning == _MEANING_COMPRESSED and self._repeated: + for sub_value in value: + sub_value.b_val = zlib.decompress(sub_value.b_val) + elif isinstance(sub_meanings, list) and self._repeated: + for idx, sub_value in enumerate(value): + try: + if sub_meanings[idx] == _MEANING_COMPRESSED: + sub_value.b_val = zlib.decompress(sub_value.b_val) + except IndexError: + # value list size exceeds sub_meanings list + break + return value + + def _db_set_compressed_meaning(self, p): + """Helper for :meth:`_db_set_value`. + + Raises: + NotImplementedError: Always. No longer implemented. + """ + raise exceptions.NoLongerImplementedError() + + def _db_set_uncompressed_meaning(self, p): + """Helper for :meth:`_db_set_value`. + + Raises: + NotImplementedError: Always. No longer implemented. + """ + raise exceptions.NoLongerImplementedError() + + +class CompressedTextProperty(BlobProperty): + """A version of :class:`TextProperty` which compresses values. + + Values are stored as ``zlib`` compressed UTF-8 byte sequences rather than + as strings as in a regular :class:`TextProperty`. This class allows NDB to + support passing `compressed=True` to :class:`TextProperty`. It is not + necessary to instantiate this class directly. 
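+
+ A minimal sketch of the intended spelling (the ``Page`` model is
+ hypothetical):
+
+ .. code-block:: python
+
+ class Page(ndb.Model):
+ # TextProperty.__new__ substitutes CompressedTextProperty when
+ # compressed=True is passed.
+ html = ndb.TextProperty(compressed=True)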
+ """ + + __slots__ = () + + def __init__(self, *args, **kwargs): + indexed = kwargs.pop("indexed", False) + if indexed: + raise NotImplementedError( + "A TextProperty cannot be indexed. Previously this was " + "allowed, but this usage is no longer supported." + ) + + kwargs["compressed"] = True + super(CompressedTextProperty, self).__init__(*args, **kwargs) + + def _constructor_info(self): + """Helper for :meth:`__repr__`. + + Yields: + Tuple[str, bool]: Pairs of argument name and a boolean indicating + if that argument is a keyword. + """ + parent_init = super(CompressedTextProperty, self).__init__ + # inspect.signature not available in Python 2.7, so we use positional + # decorator combined with argspec instead. + argspec = getattr(parent_init, "_argspec", _getfullargspec(parent_init)) + positional = getattr(parent_init, "_positional_args", 1) + for index, name in enumerate(argspec.args): + if name in ("self", "indexed", "compressed"): + continue + yield name, index >= positional + + @property + def _indexed(self): + """bool: Indicates that the property is not indexed.""" + return False + + def _validate(self, value): + """Validate a ``value`` before setting it. + + Args: + value (Union[bytes, str]): The value to check. + + Raises: + exceptions.BadValueError: If ``value`` is :class:`bytes`, but is not a valid + UTF-8 encoded string. + exceptions.BadValueError: If ``value`` is neither :class:`bytes` nor + :class:`str`. + exceptions.BadValueError: If the current property is indexed but the UTF-8 + encoded value exceeds the maximum length (1500 bytes). + """ + if not isinstance(value, str): + # In Python 2.7, bytes is a synonym for str + if isinstance(value, bytes): + try: + value = value.decode("utf-8") + except UnicodeError: + raise exceptions.BadValueError( + "In field {}, expected valid UTF-8, got {!r}".format( + self._name, value + ) + ) + else: + raise exceptions.BadValueError( + "In field {}, expected string, got {!r}".format(self._name, value) + ) + + def _to_base_type(self, value): + """Convert a value to the "base" value type for this property. + + Args: + value (Union[bytes, str]): The value to be converted. + + Returns: + Optional[bytes]: The converted value. If ``value`` is a + :class:`str`, this will return the UTF-8 encoded bytes for it. + Otherwise, it will return :data:`None`. + """ + if isinstance(value, str): + return value.encode("utf-8") + + def _from_base_type(self, value): + """Convert a value from the "base" value type for this property. + + .. note:: + + Older versions of ``ndb`` could write non-UTF-8 ``TEXT`` + properties. This means that if ``value`` is :class:`bytes`, but is + not a valid UTF-8 encoded string, it can't (necessarily) be + rejected. But, :meth:`_validate` now rejects such values, so it's + not possible to write new non-UTF-8 ``TEXT`` properties. + + Args: + value (Union[bytes, str]): The value to be converted. + + Returns: + Optional[str]: The converted value. If ``value`` is a valid UTF-8 + encoded :class:`bytes` string, this will return the decoded + :class:`str` corresponding to it. Otherwise, it will return + :data:`None`. + """ + if isinstance(value, bytes): + try: + return value.decode("utf-8") + except UnicodeError: + pass + + def _db_set_uncompressed_meaning(self, p): + """Helper for :meth:`_db_set_value`. + + Raises: + NotImplementedError: Always. This method is virtual. + """ + raise NotImplementedError + + +class TextProperty(Property): + """An unindexed property that contains UTF-8 encoded text values. 
+ + A :class:`TextProperty` is intended for values of unlimited length, hence + is **not** indexed. Previously, a :class:`TextProperty` could be indexed + via: + + .. code-block:: python + + class Item(ndb.Model): + description = ndb.TextProperty(indexed=True) + ... + + but this usage is no longer supported. If indexed text is desired, a + :class:`StringProperty` should be used instead. + + .. automethod:: _to_base_type + .. automethod:: _from_base_type + .. automethod:: _validate + + Args: + name (str): The name of the property. + compressed (bool): Indicates if the value should be compressed (via + ``zlib``). An instance of :class:`CompressedTextProperty` will be + substituted if `True`. + indexed (bool): Indicates if the value should be indexed. + repeated (bool): Indicates if this property is repeated, i.e. contains + multiple values. + required (bool): Indicates if this property is required on the given + model type. + default (Any): The default value for this property. + choices (Iterable[Any]): A container of allowed values for this + property. + validator (Callable[[~google.cloud.ndb.model.Property, Any], bool]): A + validator to be used to check values. + verbose_name (str): A longer, user-friendly name for this property. + write_empty_list (bool): Indicates if an empty list should be written + to the datastore. + + Raises: + NotImplementedError: If ``indexed=True`` is provided. + """ + + def __new__(cls, *args, **kwargs): + # If "compressed" is True, substitute CompressedTextProperty + compressed = kwargs.get("compressed", False) + if compressed: + return CompressedTextProperty(*args, **kwargs) + + return super(TextProperty, cls).__new__(cls) + + def __init__(self, *args, **kwargs): + indexed = kwargs.pop("indexed", False) + if indexed: + raise NotImplementedError( + "A TextProperty cannot be indexed. Previously this was " + "allowed, but this usage is no longer supported." + ) + + super(TextProperty, self).__init__(*args, **kwargs) + + def _constructor_info(self): + """Helper for :meth:`__repr__`. + + Yields: + Tuple[str, bool]: Pairs of argument name and a boolean indicating + if that argument is a keyword. + """ + parent_init = super(TextProperty, self).__init__ + # inspect.signature not available in Python 2.7, so we use positional + # decorator combined with argspec instead. + argspec = getattr(parent_init, "_argspec", _getfullargspec(parent_init)) + positional = getattr(parent_init, "_positional_args", 1) + for index, name in enumerate(argspec.args): + if name == "self" or name == "indexed": + continue + yield name, index >= positional + + @property + def _indexed(self): + """bool: Indicates that the property is not indexed.""" + return False + + def _validate(self, value): + """Validate a ``value`` before setting it. + + Args: + value (Union[bytes, str]): The value to check. + + Raises: + exceptions.BadValueError: If ``value`` is :class:`bytes`, but is not a valid + UTF-8 encoded string. + exceptions.BadValueError: If ``value`` is neither :class:`bytes` nor + :class:`str`. + exceptions.BadValueError: If the current property is indexed but the UTF-8 + encoded value exceeds the maximum length (1500 bytes). 
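+
+ For instance (an illustrative sketch):
+
+ .. code-block:: python
+
+ prop = ndb.TextProperty()
+ prop._validate("opaque text") # accepted
+ prop._validate(b"\xff") # BadValueError: not valid UTF-8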
+ """ + if isinstance(value, bytes): + try: + encoded_length = len(value) + value = value.decode("utf-8") + except UnicodeError: + raise exceptions.BadValueError( + "In field {}, expected valid UTF-8, got {!r}".format( + self._name, value + ) + ) + elif isinstance(value, str): + encoded_length = len(value.encode("utf-8")) + else: + raise exceptions.BadValueError("Expected string, got {!r}".format(value)) + + if self._indexed and encoded_length > _MAX_STRING_LENGTH: + raise exceptions.BadValueError( + "Indexed value {} must be at most {:d} " + "bytes".format(self._name, _MAX_STRING_LENGTH) + ) + + def _to_base_type(self, value): + """Convert a value to the "base" value type for this property. + + Args: + value (Union[bytes, str]): The value to be converted. + + Returns: + Optional[str]: The converted value. If ``value`` is a + :class:`bytes`, this will return the UTF-8 decoded ``str`` for it. + Otherwise, it will return :data:`None`. + """ + if isinstance(value, bytes): + return value.decode("utf-8") + + def _from_base_type(self, value): + """Convert a value from the "base" value type for this property. + + .. note:: + + Older versions of ``ndb`` could write non-UTF-8 ``TEXT`` + properties. This means that if ``value`` is :class:`bytes`, but is + not a valid UTF-8 encoded string, it can't (necessarily) be + rejected. But, :meth:`_validate` now rejects such values, so it's + not possible to write new non-UTF-8 ``TEXT`` properties. + + Args: + value (Union[bytes, str]): The value to be converted. + + Returns: + Optional[str]: The converted value. If ``value`` is a a valid UTF-8 + encoded :class:`bytes` string, this will return the decoded + :class:`str` corresponding to it. Otherwise, it will return + :data:`None`. + """ + if isinstance(value, bytes): + try: + return value.decode("utf-8") + except UnicodeError: + pass + + def _db_set_uncompressed_meaning(self, p): + """Helper for :meth:`_db_set_value`. + + Raises: + NotImplementedError: Always. No longer implemented. + """ + raise exceptions.NoLongerImplementedError() + + +class StringProperty(TextProperty): + """An indexed property that contains UTF-8 encoded text values. + + This is nearly identical to :class:`TextProperty`, but is indexed. Values + must be at most 1500 bytes (when UTF-8 encoded from :class:`str` to bytes). + + Raises: + NotImplementedError: If ``indexed=False`` is provided. + """ + + def __init__(self, *args, **kwargs): + indexed = kwargs.pop("indexed", True) + if not indexed: + raise NotImplementedError( + "A StringProperty must be indexed. Previously setting " + "``indexed=False`` was allowed, but this usage is no longer " + "supported." + ) + + super(StringProperty, self).__init__(*args, **kwargs) + + @property + def _indexed(self): + """bool: Indicates that the property is indexed.""" + return True + + +class GeoPtProperty(Property): + """A property that contains :attr:`.GeoPt` values. + + .. automethod:: _validate + """ + + def _validate(self, value): + """Validate a ``value`` before setting it. + + Args: + value (~google.cloud.datastore.helpers.GeoPoint): The value to + check. + + Raises: + exceptions.BadValueError: If ``value`` is not a :attr:`.GeoPt`. + """ + if not isinstance(value, GeoPt): + raise exceptions.BadValueError( + "In field {}, expected GeoPt, got {!r}".format(self._name, value) + ) + + +class PickleProperty(BlobProperty): + """A property that contains values that are pickle-able. + + .. note:: + + Unlike most property types, a :class:`PickleProperty` is **not** + indexed by default. 
+ + This will use :func:`pickle.dumps` with the highest available pickle + protocol to convert to bytes and :func:`pickle.loads` to convert **from** + bytes. The base value stored in the datastore will be the pickled bytes. + + .. automethod:: _to_base_type + .. automethod:: _from_base_type + """ + + def _to_base_type(self, value): + """Convert a value to the "base" value type for this property. + + Args: + value (Any): The value to be converted. + + Returns: + bytes: The pickled ``value``. + """ + return pickle.dumps(value, pickle.HIGHEST_PROTOCOL) + + def _from_base_type(self, value): + """Convert a value from the "base" value type for this property. + + Args: + value (bytes): The value to be converted. + + Returns: + Any: The unpickled ``value``. + """ + if type(value) is bytes: # pragma: NO BRANCH + return pickle.loads(value, encoding="bytes") + return pickle.loads(value) # pragma: NO COVER + + +class JsonProperty(BlobProperty): + """A property that contains JSON-encodable values. + + .. note:: + + Unlike most property types, a :class:`JsonProperty` is **not** + indexed by default. + + .. automethod:: _to_base_type + .. automethod:: _from_base_type + .. automethod:: _validate + + Args: + name (str): The name of the property. + compressed (bool): Indicates if the value should be compressed (via + ``zlib``). + json_type (type): The expected type of values that this property can + hold. If :data:`None`, any type is allowed. + indexed (bool): Indicates if the value should be indexed. + repeated (bool): Indicates if this property is repeated, i.e. contains + multiple values. + required (bool): Indicates if this property is required on the given + model type. + default (Any): The default value for this property. + choices (Iterable[Any]): A container of allowed values for this + property. + validator (Callable[[~google.cloud.ndb.model.Property, Any], bool]): A + validator to be used to check values. + verbose_name (str): A longer, user-friendly name for this property. + write_empty_list (bool): Indicates if an empty list should be written + to the datastore. + """ + + _json_type = None + + @utils.positional(2) + def __init__( + self, + name=None, + compressed=None, + json_type=None, + indexed=None, + repeated=None, + required=None, + default=None, + choices=None, + validator=None, + verbose_name=None, + write_empty_list=None, + ): + super(JsonProperty, self).__init__( + name=name, + compressed=compressed, + indexed=indexed, + repeated=repeated, + required=required, + default=default, + choices=choices, + validator=validator, + verbose_name=verbose_name, + write_empty_list=write_empty_list, + ) + if json_type is not None: + self._json_type = json_type + + def _validate(self, value): + """Validate a ``value`` before setting it. + + Args: + value (Any): The value to check. + + Raises: + TypeError: If the current property has a JSON type set and + ``value`` is not an instance of that type. + """ + if self._json_type is None: + return + if not isinstance(value, self._json_type): + raise TypeError("JSON property must be a {}".format(self._json_type)) + + def _to_base_type(self, value): + """Convert a value to the "base" value type for this property. + + Args: + value (Any): The value to be converted. + + Returns: + bytes: The ``value``, JSON encoded as an ASCII byte string. + """ + as_str = json.dumps(value, separators=(",", ":"), ensure_ascii=True) + return as_str.encode("ascii") + + def _from_base_type(self, value): + """Convert a value from the "base" value type for this property. 
+
+ Args:
+ value (Union[bytes, str]): The value to be converted.
+
+ Returns:
+ Any: The ``value`` (ASCII bytes or string) loaded as JSON.
+ """
+ # We write and retrieve `bytes` normally, but for some reason get back
+ # `str` from a projection query.
+ if not isinstance(value, str):
+ value = value.decode("ascii")
+ return json.loads(value)
+
+
+@functools.total_ordering
+class User(object):
+ """Provides the email address, nickname, and ID for a Google Accounts user.
+
+ .. note::
+
+ This class is a port of ``google.appengine.api.users.User``.
+ In the (legacy) Google App Engine standard environment, this
+ constructor relied on several environment variables to provide a
+ fallback for inputs. In particular:
+
+ * ``AUTH_DOMAIN`` for the ``_auth_domain`` argument
+ * ``USER_EMAIL`` for the ``email`` argument
+ * ``USER_ID`` for the ``_user_id`` argument
+ * ``FEDERATED_IDENTITY`` for the (now removed) ``federated_identity``
+ argument
+ * ``FEDERATED_PROVIDER`` for the (now removed) ``federated_provider``
+ argument
+
+ However, in the gVisor Google App Engine runtime (e.g. Python 3.7),
+ none of these environment variables will be populated.
+
+ .. note::
+
+ Previous versions of the Google Cloud Datastore API had an explicit
+ ``UserValue`` field. However, the ``google.datastore.v1`` API returns
+ previously stored user values as an ``Entity`` with the meaning set to
+ ``ENTITY_USER=20``.
+
+ .. warning::
+
+ The ``federated_identity`` and ``federated_provider`` arguments are
+ decommissioned and have been removed from the constructor. Additionally
+ ``_strict_mode`` has been removed from the constructor and the
+ ``federated_identity()`` and ``federated_provider()`` methods have been
+ removed from this class.
+
+ Args:
+ email (str): The user's email address.
+ _auth_domain (str): The auth domain for the current application.
+ _user_id (str): The user ID.
+
+ Raises:
+ ValueError: If the ``_auth_domain`` is not passed in.
+ UserNotFoundError: If ``email`` is empty.
+ """
+
+ def __init__(self, email=None, _auth_domain=None, _user_id=None):
+ if _auth_domain is None:
+ raise ValueError("_auth_domain is required")
+
+ if not email:
+ raise UserNotFoundError
+
+ self._auth_domain = _auth_domain
+ self._email = email
+ self._user_id = _user_id
+
+ def nickname(self):
+ """The nickname for this user.
+
+ A nickname is a human-readable string that uniquely identifies a Google
+ user with respect to this application, akin to a username. For some
+ users, this nickname is an email address or part of the email address.
+
+ Returns:
+ str: The nickname of the user.
+ """
+ if (
+ self._email
+ and self._auth_domain
+ and self._email.endswith("@" + self._auth_domain)
+ ):
+ suffix_len = len(self._auth_domain) + 1
+ return self._email[:-suffix_len]
+ else:
+ return self._email
+
+ def email(self):
+ """Returns the user's email address."""
+ return self._email
+
+ def user_id(self):
+ """Obtains the user ID of the user.
+
+ Returns:
+ Optional[str]: A permanent unique identifying string or
+ :data:`None`. If the email address was set explicitly, this will
+ return :data:`None`.
+ """
+ return self._user_id
+
+ def auth_domain(self):
+ """Obtains the user's authentication domain.
+
+ Returns:
+ str: The authentication domain. This method is internal and
+ should not be used by client applications.
+ """
+ return self._auth_domain
+
+ @classmethod
+ def _from_ds_entity(cls, user_entity):
+ """Convert a datastore user entity to a user value.
+
+ Args:
+ user_entity (~google.cloud.datastore.entity.Entity): A user value
+ datastore entity.
+ """
+ kwargs = {
+ "email": user_entity["email"],
+ "_auth_domain": user_entity["auth_domain"],
+ }
+ if "user_id" in user_entity:
+ kwargs["_user_id"] = user_entity["user_id"]
+ return cls(**kwargs)
+
+ def __str__(self):
+ return str(self.nickname())
+
+ def __repr__(self):
+ values = ["email={!r}".format(self._email)]
+ if self._user_id:
+ values.append("_user_id={!r}".format(self._user_id))
+ return "users.User({})".format(", ".join(values))
+
+ def __hash__(self):
+ return hash((self._email, self._auth_domain))
+
+ def __eq__(self, other):
+ if not isinstance(other, User):
+ return NotImplemented
+
+ return self._email == other._email and self._auth_domain == other._auth_domain
+
+ def __lt__(self, other):
+ if not isinstance(other, User):
+ return NotImplemented
+
+ return (self._email, self._auth_domain) < (
+ other._email,
+ other._auth_domain,
+ )
+
+
+class UserProperty(Property):
+ """A property that contains :class:`.User` values.
+
+ .. warning::
+
+ This exists for backwards compatibility with existing Cloud Datastore
+ schemas only; storing :class:`.User` objects directly in Cloud
+ Datastore is not recommended.
+
+ .. warning::
+
+ The ``auto_current_user`` and ``auto_current_user_add`` arguments are
+ no longer supported.
+
+ .. note::
+
+ On Google App Engine standard, after saving a :class:`User` the user ID
+ would automatically be populated by the datastore, even if it wasn't
+ set in the :class:`User` value being stored. For example:
+
+ .. code-block:: python
+
+ >>> class Simple(ndb.Model):
+ ... u = ndb.UserProperty()
+ ...
+ >>> entity = Simple(u=users.User("user@example.com"))
+ >>> entity.u.user_id() is None
+ True
+ >>>
+ >>> entity.put()
+ >>> # Reload without the cached values
+ >>> entity = entity.key.get(use_cache=False,
+ ... use_global_cache=False)
+ >>> entity.u.user_id()
+ '...9174...'
+
+ However, in the gVisor Google App Engine runtime (e.g. Python 3.7),
+ this will behave differently. The user ID will only be stored if it
+ is manually set in the :class:`User` instance, either by the running
+ application or by retrieving a stored :class:`User` that already has
+ a user ID set.
+
+ .. automethod:: _validate
+ .. automethod:: _prepare_for_put
+
+ Args:
+ name (str): The name of the property.
+ auto_current_user (bool): Deprecated flag. When supported, if this flag
+ was set to :data:`True`, the property value would be set to the
+ currently signed-in user whenever the model instance is stored in
+ the datastore, overwriting the property's previous value.
+ This was useful for tracking which user modifies a model instance.
+ auto_current_user_add (bool): Deprecated flag. When supported, if this
+ flag was set to :data:`True`, the property value would be set to
+ the currently signed-in user the first time the model instance is
+ stored in the datastore, unless the property has already been
+ assigned a value. This was useful for tracking which user creates
+ a model instance, which may not be the same user that modifies it
+ later.
+ indexed (bool): Indicates if the value should be indexed.
+ repeated (bool): Indicates if this property is repeated, i.e. contains
+ multiple values.
+ required (bool): Indicates if this property is required on the given
+ model type.
+ default (User): The default value for this property.
+ choices (Iterable[User]): A container of allowed values for this
+ property.
+ validator (Callable[[~google.cloud.ndb.model.Property, Any], bool]): A
+ validator to be used to check values.
+ verbose_name (str): A longer, user-friendly name for this property.
+ write_empty_list (bool): Indicates if an empty list should be written
+ to the datastore.
+
+ Raises:
+ NotImplementedError: If ``auto_current_user`` is provided.
+ NotImplementedError: If ``auto_current_user_add`` is provided.
+ """
+
+ _auto_current_user = False
+ _auto_current_user_add = False
+
+ @utils.positional(2)
+ def __init__(
+ self,
+ name=None,
+ auto_current_user=None,
+ auto_current_user_add=None,
+ indexed=None,
+ repeated=None,
+ required=None,
+ default=None,
+ choices=None,
+ validator=None,
+ verbose_name=None,
+ write_empty_list=None,
+ ):
+ super(UserProperty, self).__init__(
+ name=name,
+ indexed=indexed,
+ repeated=repeated,
+ required=required,
+ default=default,
+ choices=choices,
+ validator=validator,
+ verbose_name=verbose_name,
+ write_empty_list=write_empty_list,
+ )
+ if auto_current_user is not None:
+ raise exceptions.NoLongerImplementedError()
+
+ if auto_current_user_add is not None:
+ raise exceptions.NoLongerImplementedError()
+
+ def _validate(self, value):
+ """Validate a ``value`` before setting it.
+
+ Args:
+ value (User): The value to check.
+
+ Raises:
+ exceptions.BadValueError: If ``value`` is not a :class:`User`.
+ """
+ # Might be GAE User or our own version
+ if type(value).__name__ != "User":
+ raise exceptions.BadValueError(
+ "In field {}, expected User, got {!r}".format(self._name, value)
+ )
+
+ def _prepare_for_put(self, entity):
+ """Pre-put hook.
+
+ This is a no-op. In previous versions of ``ndb``, this method
+ populated the value based on ``auto_current_user`` or
+ ``auto_current_user_add``, but these flags have been disabled.
+
+ Args:
+ entity (Model): An entity with values.
+ """
+
+ def _to_base_type(self, value):
+ """Convert the user value to a datastore entity.
+
+ Arguments:
+ value (User): The user value.
+
+ Returns:
+ ~google.cloud.datastore.entity.Entity: The datastore entity.
+ """
+ user_entity = ds_entity_module.Entity()
+
+ # Set required fields.
+ user_entity["email"] = str(value.email())
+ user_entity.exclude_from_indexes.add("email")
+ user_entity["auth_domain"] = str(value.auth_domain())
+ user_entity.exclude_from_indexes.add("auth_domain")
+ # Set optional field.
+ user_id = value.user_id()
+ if user_id:
+ user_entity["user_id"] = str(user_id)
+ user_entity.exclude_from_indexes.add("user_id")
+
+ return user_entity
+
+ def _from_base_type(self, ds_entity):
+ """Convert the user value from a datastore entity.
+
+ Arguments:
+ ds_entity (~google.cloud.datastore.entity.Entity): The datastore
+ entity.
+
+ Returns:
+ User: The converted value.
+ """
+ return User._from_ds_entity(ds_entity)
+
+ def _to_datastore(self, entity, data, prefix="", repeated=False):
+ """Override of :method:`Property._to_datastore`.
+
+ We just need to set the meaning to indicate the value is a User.
+ """
+ keys = super(UserProperty, self)._to_datastore(
+ entity, data, prefix=prefix, repeated=repeated
+ )
+
+ for key in keys:
+ value = data.get(key)
+ if value:
+ data.setdefault("_meanings", {})[key] = (
+ _MEANING_PREDEFINED_ENTITY_USER,
+ value,
+ )
+
+ return keys
+
+
+class KeyProperty(Property):
+ """A property that contains :class:`~google.cloud.ndb.key.Key` values.
+
+ The constructor for :class:`KeyProperty` allows at most two positional
+ arguments. Any usage of :data:`None` as a positional argument will
+ be ignored. 
Any of the following signatures are allowed: + + .. testsetup:: key-property-constructor + + from google.cloud import ndb + + + class SimpleModel(ndb.Model): + pass + + .. doctest:: key-property-constructor + + >>> name = "my_value" + >>> ndb.KeyProperty(name) + KeyProperty('my_value') + >>> ndb.KeyProperty(SimpleModel) + KeyProperty(kind='SimpleModel') + >>> ndb.KeyProperty(name, SimpleModel) + KeyProperty('my_value', kind='SimpleModel') + >>> ndb.KeyProperty(SimpleModel, name) + KeyProperty('my_value', kind='SimpleModel') + + The type of the positional arguments will be used to determine their + purpose: a string argument is assumed to be the ``name`` and a + :class:`type` argument is assumed to be the ``kind`` (and checked that + the type is a subclass of :class:`Model`). + + .. automethod:: _validate + + Args: + name (str): The name of the property. + kind (Union[type, str]): The (optional) kind to be stored. If provided + as a positional argument, this must be a subclass of :class:`Model` + otherwise the kind name is sufficient. + indexed (bool): Indicates if the value should be indexed. + repeated (bool): Indicates if this property is repeated, i.e. contains + multiple values. + required (bool): Indicates if this property is required on the given + model type. + default (~google.cloud.ndb.key.Key): The default value for this property. + choices (Iterable[~google.cloud.ndb.key.Key]): A container of allowed values for this + property. + validator (Callable[[~google.cloud.ndb.model.Property, ~google.cloud.ndb.key.Key], bool]): A + validator to be used to check values. + verbose_name (str): A longer, user-friendly name for this property. + write_empty_list (bool): Indicates if an empty list should be written + to the datastore. + """ + + _kind = None + + def _handle_positional(wrapped): + @functools.wraps(wrapped) + def wrapper(self, *args, **kwargs): + for arg in args: + if isinstance(arg, str): + if "name" in kwargs: + raise TypeError("You can only specify name once") + + kwargs["name"] = arg + + elif isinstance(arg, type): + if "kind" in kwargs: + raise TypeError("You can only specify kind once") + + kwargs["kind"] = arg + + elif arg is not None: + raise TypeError("Unexpected positional argument: {!r}".format(arg)) + + return wrapped(self, **kwargs) + + wrapper._wrapped = wrapped + return wrapper + + @utils.positional(3) + @_handle_positional + def __init__( + self, + name=None, + kind=None, + indexed=None, + repeated=None, + required=None, + default=None, + choices=None, + validator=None, + verbose_name=None, + write_empty_list=None, + ): + if isinstance(kind, type) and issubclass(kind, Model): + kind = kind._get_kind() + + else: + if kind is not None and not isinstance(kind, str): + raise TypeError("Kind must be a Model class or a string") + + super(KeyProperty, self).__init__( + name=name, + indexed=indexed, + repeated=repeated, + required=required, + default=default, + choices=choices, + validator=validator, + verbose_name=verbose_name, + write_empty_list=write_empty_list, + ) + if kind is not None: + self._kind = kind + + def _constructor_info(self): + """Helper for :meth:`__repr__`. + + Yields: + Tuple[str, bool]: Pairs of argument name and a boolean indicating + if that argument is a keyword. 
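+
+ For example, ``repr(ndb.KeyProperty("my_value", kind=SimpleModel))``
+ should render ``name`` positionally and ``kind`` as a keyword,
+ matching the ``KeyProperty('my_value', kind='SimpleModel')`` doctest
+ in the class docstring above.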
+ """ + yield "name", False + yield "kind", True + from_inspect = super(KeyProperty, self)._constructor_info() + for name, is_keyword in from_inspect: + if name in ("args", "name", "kind"): + continue + yield name, is_keyword + + def _validate(self, value): + """Validate a ``value`` before setting it. + + Args: + value (~google.cloud.ndb.key.Key): The value to check. + + Raises: + exceptions.BadValueError: If ``value`` is not a :class:`.Key`. + exceptions.BadValueError: If ``value`` is a partial :class:`.Key` (i.e. it + has no name or ID set). + exceptions.BadValueError: If the current property has an associated ``kind`` + and ``value`` does not match that kind. + """ + if not isinstance(value, Key): + raise exceptions.BadValueError( + "In field {}, expected Key, got {!r}".format(self._name, value) + ) + + # Reject incomplete keys. + if not value.id(): + raise exceptions.BadValueError( + "In field {}, expected complete Key, got {!r}".format(self._name, value) + ) + + # Verify kind if provided. + if self._kind is not None: + if value.kind() != self._kind: + raise exceptions.BadValueError( + "In field {}, expected Key with kind={!r}, got " + "{!r}".format(self._name, self._kind, value) + ) + + def _to_base_type(self, value): + """Convert a value to the "base" value type for this property. + + Args: + value (~key.Key): The value to be converted. + + Returns: + google.cloud.datastore.Key: The converted value. + + Raises: + TypeError: If ``value`` is not a :class:`~key.Key`. + """ + if not isinstance(value, key_module.Key): + raise TypeError( + "Cannot convert to datastore key, expected Key value; " + "received {}".format(value) + ) + return value._key + + def _from_base_type(self, value): + """Convert a value from the "base" value type for this property. + + Args: + value (google.cloud.datastore.Key): The value to be converted. + + Returns: + key.Key: The converted value. + """ + return key_module.Key._from_ds_key(value) + + +class BlobKeyProperty(Property): + """A property containing :class:`~google.cloud.ndb.model.BlobKey` values. + + .. automethod:: _validate + """ + + def _validate(self, value): + """Validate a ``value`` before setting it. + + Args: + value (~google.cloud.ndb.model.BlobKey): The value to check. + + Raises: + exceptions.BadValueError: If ``value`` is not a + :class:`~google.cloud.ndb.model.BlobKey`. + """ + if not isinstance(value, BlobKey): + raise exceptions.BadValueError( + "In field {}, expected BlobKey, got {!r}".format(self._name, value) + ) + + +class DateTimeProperty(Property): + """A property that contains :class:`~datetime.datetime` values. + + If ``tzinfo`` is not set, this property expects "naive" datetime stamps, + i.e. no timezone can be set. Furthermore, the assumption is that naive + datetime stamps represent UTC. + + If ``tzinfo`` is set, timestamps will be stored as UTC and converted back + to the timezone set by ``tzinfo`` when reading values back out. + + .. note:: + + Unlike Django, ``auto_now_add`` can be overridden by setting the + value before writing the entity. And unlike the legacy + ``google.appengine.ext.db``, ``auto_now`` does not supply a default + value. Also unlike legacy ``db``, when the entity is written, the + property values are updated to match what was written. Finally, beware + that this also updates the value in the in-process cache, **and** that + ``auto_now_add`` may interact weirdly with transaction retries (a retry + of a property with ``auto_now_add`` set will reuse the value that was + set on the first try). + + .. 
automethod:: _validate + .. automethod:: _prepare_for_put + + Args: + name (str): The name of the property. + auto_now (bool): Indicates that the property should be set to the + current datetime when an entity is created and whenever it is + updated. + auto_now_add (bool): Indicates that the property should be set to the + current datetime when an entity is created. + tzinfo (Optional[datetime.tzinfo]): If set, values read from Datastore + will be converted to this timezone. Otherwise, values will be + returned as naive datetime objects with an implied UTC timezone. + indexed (bool): Indicates if the value should be indexed. + repeated (bool): Indicates if this property is repeated, i.e. contains + multiple values. + required (bool): Indicates if this property is required on the given + model type. + default (~datetime.datetime): The default value for this property. + choices (Iterable[~datetime.datetime]): A container of allowed values + for this property. + validator (Callable[[~google.cloud.ndb.model.Property, Any], bool]): A + validator to be used to check values. + verbose_name (str): A longer, user-friendly name for this property. + write_empty_list (bool): Indicates if an empty list should be written + to the datastore. + + Raises: + ValueError: If ``repeated=True`` and ``auto_now=True``. + ValueError: If ``repeated=True`` and ``auto_now_add=True``. + """ + + _auto_now = False + _auto_now_add = False + _tzinfo = None + + @utils.positional(2) + def __init__( + self, + name=None, + auto_now=None, + auto_now_add=None, + tzinfo=None, + indexed=None, + repeated=None, + required=None, + default=None, + choices=None, + validator=None, + verbose_name=None, + write_empty_list=None, + ): + super(DateTimeProperty, self).__init__( + name=name, + indexed=indexed, + repeated=repeated, + required=required, + default=default, + choices=choices, + validator=validator, + verbose_name=verbose_name, + write_empty_list=write_empty_list, + ) + if self._repeated: + if auto_now: + raise ValueError( + "DateTimeProperty {} could use auto_now and be " + "repeated, but there would be no point.".format(self._name) + ) + elif auto_now_add: + raise ValueError( + "DateTimeProperty {} could use auto_now_add and be " + "repeated, but there would be no point.".format(self._name) + ) + if auto_now is not None: + self._auto_now = auto_now + if auto_now_add is not None: + self._auto_now_add = auto_now_add + if tzinfo is not None: + self._tzinfo = tzinfo + + def _validate(self, value): + """Validate a ``value`` before setting it. + + Args: + value (~datetime.datetime): The value to check. + + Raises: + exceptions.BadValueError: If ``value`` is not a :class:`~datetime.datetime`. + """ + if not isinstance(value, datetime.datetime): + raise exceptions.BadValueError( + "In field {}, expected datetime, got {!r}".format(self._name, value) + ) + + if self._tzinfo is None and value.tzinfo is not None: + raise exceptions.BadValueError( + "DatetimeProperty without tzinfo {} can only support naive " + "datetimes (presumed UTC). Please set tzinfo to support " + "alternate timezones.".format(self._name) + ) + + @staticmethod + def _now(): + """datetime.datetime: Return current datetime. + + Subclasses will override this to return different forms of "now". + """ + return datetime.datetime.utcnow() + + def _prepare_for_put(self, entity): + """Sets the current timestamp when "auto" is set. 
+
+ If one of the following scenarios occurs
+
+ * ``auto_now=True``
+ * ``auto_now_add=True`` and the ``entity`` doesn't have a value set
+
+ then this hook will run before the ``entity`` is ``put()`` into
+ the datastore.
+
+ Args:
+ entity (Model): An entity with values.
+ """
+ if self._auto_now or (self._auto_now_add and not self._has_value(entity)):
+ value = self._now()
+ self._store_value(entity, value)
+
+ def _from_base_type(self, value):
+ """Convert a value from the "base" value type for this property.
+
+ Args:
+ value (Union[int, datetime.datetime]): The value to be converted.
+ The value will be `int` for entities retrieved by a projection
+ query and is a timestamp as the number of microseconds since the
+ epoch.
+
+ Returns:
+ Optional[datetime.datetime]: If ``tzinfo`` is set on this property,
+ the value converted to the timezone in ``tzinfo``. Otherwise,
+ the value with its ``tzinfo`` stripped, or :data:`None` if the
+ value had no ``tzinfo`` to strip.
+ """
+ if isinstance(value, int):
+ # Projection query, value is integer microseconds
+ seconds = value / 1e6
+ value = datetime.datetime.fromtimestamp(seconds, pytz.utc)
+
+ if self._tzinfo is not None:
+ if value.tzinfo is None:
+ value = value.replace(tzinfo=pytz.utc)
+ return value.astimezone(self._tzinfo)
+
+ elif value.tzinfo is not None:
+ return value.replace(tzinfo=None)
+
+ def _to_base_type(self, value):
+ """Convert a value to the "base" value type for this property.
+
+ Args:
+ value (datetime.datetime): The value to be converted.
+
+ Returns:
+ Optional[datetime.datetime]: The converted value. If ``tzinfo`` is
+ set on this property and ``value`` is timezone-aware, the value
+ converted to UTC; otherwise :data:`None`, to indicate the value
+ did not need to be converted.
+ """
+ if self._tzinfo is not None and value.tzinfo is not None:
+ return value.astimezone(pytz.utc)
+
+
+class DateProperty(DateTimeProperty):
+ """A property that contains :class:`~datetime.date` values.
+
+ .. automethod:: _to_base_type
+ .. automethod:: _from_base_type
+ .. automethod:: _validate
+ """
+
+ def _validate(self, value):
+ """Validate a ``value`` before setting it.
+
+ Args:
+ value (~datetime.date): The value to check.
+
+ Raises:
+ exceptions.BadValueError: If ``value`` is not a :class:`~datetime.date`.
+ """
+ if not isinstance(value, datetime.date):
+ raise exceptions.BadValueError(
+ "In field {}, expected date, got {!r}".format(self._name, value)
+ )
+
+ def _to_base_type(self, value):
+ """Convert a value to the "base" value type for this property.
+
+ Args:
+ value (~datetime.date): The value to be converted.
+
+ Returns:
+ ~datetime.datetime: The converted value: a datetime object with the
+ time set to ``00:00``.
+
+ Raises:
+ TypeError: If ``value`` is not a :class:`~datetime.date`.
+ """
+ if not isinstance(value, datetime.date):
+ raise TypeError(
+ "Cannot convert to datetime expected date value; "
+ "received {}".format(value)
+ )
+ return datetime.datetime(value.year, value.month, value.day)
+
+ def _from_base_type(self, value):
+ """Convert a value from the "base" value type for this property.
+
+ Args:
+ value (~datetime.datetime): The value to be converted.
+
+ Returns:
+ ~datetime.date: The converted value: the date that ``value``
+ occurs on.
+ """
+ return value.date()
+
+ @staticmethod
+ def _now():
+ """datetime.date: Return current date."""
+ return datetime.datetime.utcnow().date()
+
+
+class TimeProperty(DateTimeProperty):
+ """A property that contains :class:`~datetime.time` values.
+
+ .. automethod:: _to_base_type
+ .. automethod:: _from_base_type
+ .. 
+    .. automethod:: _validate
+    """
+
+    def _validate(self, value):
+        """Validate a ``value`` before setting it.
+
+        Args:
+            value (~datetime.time): The value to check.
+
+        Raises:
+            exceptions.BadValueError: If ``value`` is not a
+                :class:`~datetime.time`.
+        """
+        if not isinstance(value, datetime.time):
+            raise exceptions.BadValueError(
+                "In field {}, expected time, got {!r}".format(self._name, value)
+            )
+
+    def _to_base_type(self, value):
+        """Convert a value to the "base" value type for this property.
+
+        Args:
+            value (~datetime.time): The value to be converted.
+
+        Returns:
+            ~datetime.datetime: The converted value: a datetime object with the
+                date set to ``1970-01-01``.
+
+        Raises:
+            TypeError: If ``value`` is not a :class:`~datetime.time`.
+        """
+        if not isinstance(value, datetime.time):
+            raise TypeError(
+                "Cannot convert to datetime; expected a time value, "
+                "received {}".format(value)
+            )
+        return datetime.datetime(
+            1970,
+            1,
+            1,
+            value.hour,
+            value.minute,
+            value.second,
+            value.microsecond,
+        )
+
+    def _from_base_type(self, value):
+        """Convert a value from the "base" value type for this property.
+
+        Args:
+            value (~datetime.datetime): The value to be converted.
+
+        Returns:
+            ~datetime.time: The converted value: the time that ``value``
+                occurs at.
+        """
+        return value.time()
+
+    @staticmethod
+    def _now():
+        """datetime.time: Return the current time."""
+        return datetime.datetime.utcnow().time()
+
+
+class StructuredProperty(Property):
+    """A Property whose value is itself an entity.
+
+    The values of the sub-entity are indexed and can be queried.
+    """
+
+    _model_class = None
+    _kwargs = None
+
+    def __init__(self, model_class, name=None, **kwargs):
+        super(StructuredProperty, self).__init__(name=name, **kwargs)
+        if self._repeated:
+            if model_class._has_repeated:
+                raise TypeError(
+                    "This StructuredProperty cannot use repeated=True "
+                    "because its model class (%s) contains repeated "
+                    "properties (directly or indirectly)." % model_class.__name__
+                )
+        self._model_class = model_class
+
+    def _get_value(self, entity):
+        """Override _get_value() to *not* raise UnprojectedPropertyError.
+
+        This is necessary because the projection must include both the
+        sub-entity and the property name that is projected (e.g. 'foo.bar'
+        instead of only 'foo'). In that case the original code would fail,
+        because it only looks for the property name ('foo'). Here we check for
+        a value, and only call the original code if the value is None.
+        """
+        value = self._get_user_value(entity)
+        if value is None and entity._projection:
+            # Invoke super _get_value() to raise the proper exception.
+            return super(StructuredProperty, self)._get_value(entity)
+        return value
+
+    def _get_for_dict(self, entity):
+        value = self._get_value(entity)
+        if self._repeated:
+            value = [v._to_dict() for v in value]
+        elif value is not None:
+            value = value._to_dict()
+        return value
+
+    def __getattr__(self, attrname):
+        """Dynamically get a subproperty."""
+        # Optimistically try to use the dict key.
+        prop = self._model_class._properties.get(attrname)
+
+        # We're done if we have a hit and _code_name matches.
+        if prop is None or prop._code_name != attrname:
+            # Otherwise, use linear search looking for a matching _code_name.
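+            # For example, with a hypothetical ``address`` property declared
+            # as ``StructuredProperty(Address)``, accessing
+            # ``MyModel.address.city`` lands here and resolves to a copy of
+            # ``Address.city`` whose name is rewritten to "address.city" below.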
+ for candidate in self._model_class._properties.values(): + if candidate._code_name == attrname: + prop = candidate + break + + if prop is None: + raise AttributeError( + "Model subclass %s has no attribute %s" + % (self._model_class.__name__, attrname) + ) + + prop_copy = copy.copy(prop) + prop_copy._name = self._name + "." + prop_copy._name + + # Cache the outcome, so subsequent requests for the same attribute + # name will get the copied property directly rather than going + # through the above motions all over again. + setattr(self, attrname, prop_copy) + + return prop_copy + + def _comparison(self, op, value): + if op != query_module._EQ_OP: + raise exceptions.BadFilterError("StructuredProperty filter can only use ==") + if not self._indexed: + raise exceptions.BadFilterError( + "Cannot query for unindexed StructuredProperty %s" % self._name + ) + # Import late to avoid circular imports. + from .query import ConjunctionNode, PostFilterNode + from .query import RepeatedStructuredPropertyPredicate + + if value is None: + from .query import ( + FilterNode, + ) # Import late to avoid circular imports. + + return FilterNode(self._name, op, value) + + value = self._do_validate(value) + filters = [] + match_keys = [] + for prop_name, prop in self._model_class._properties.items(): + subvalue = prop._get_value(value) + if prop._repeated: + if subvalue: # pragma: no branch + raise exceptions.BadFilterError( + "Cannot query for non-empty repeated property %s" % prop._name + ) + continue # pragma: NO COVER + + if subvalue is not None: # pragma: no branch + altprop = getattr(self, prop._code_name) + filt = altprop._comparison(op, subvalue) + filters.append(filt) + match_keys.append(prop._name) + + if not filters: + raise exceptions.BadFilterError( + "StructuredProperty filter without any values" + ) + + if len(filters) == 1: + return filters[0] + + if self._repeated: + entity_pb = _entity_to_protobuf(value) + predicate = RepeatedStructuredPropertyPredicate( + self._name, match_keys, entity_pb + ) + filters.append(PostFilterNode(predicate)) + + return ConjunctionNode(*filters) + + def _IN(self, value): + if not isinstance(value, (list, tuple, set, frozenset)): + raise exceptions.BadArgumentError( + "Expected list, tuple or set, got %r" % (value,) + ) + from .query import DisjunctionNode, FalseNode + + # Expand to a series of == filters. + filters = [self._comparison(query_module._EQ_OP, val) for val in value] + if not filters: + # DisjunctionNode doesn't like an empty list of filters. + # Running the query will still fail, but this matches the + # behavior of IN for regular properties. + return FalseNode() + else: + return DisjunctionNode(*filters) + + IN = _IN + + def _validate(self, value): + if isinstance(value, dict): + # A dict is assumed to be the result of a _to_dict() call. + return self._model_class(**value) + if not isinstance(value, self._model_class): + raise exceptions.BadValueError( + "In field {}, expected {} instance, got {!r}".format( + self._name, self._model_class.__name__, value.__class__ + ) + ) + + def _has_value(self, entity, rest=None): + """Check if entity has a value for this property. + + Basically, prop._has_value(self, ent, ['x', 'y']) is similar to + (prop._has_value(ent) and prop.x._has_value(ent.x) and + prop.x.y._has_value(ent.x.y)), assuming prop.x and prop.x.y exist. + + Args: + entity (ndb.Model): An instance of a model. + rest (list[str]): optional list of attribute names to check in + addition. 
+
+        Returns:
+            bool: True if the entity has a value for that property.
+        """
+        ok = super(StructuredProperty, self)._has_value(entity)
+        if ok and rest:
+            value = self._get_value(entity)
+            if self._repeated:
+                if len(value) != 1:
+                    raise RuntimeError(
+                        "Failed to retrieve sub-entity of StructuredProperty"
+                        " %s" % self._name
+                    )
+                subent = value[0]
+            else:
+                subent = value
+
+            if subent is None:
+                return True
+
+            subprop = subent._properties.get(rest[0])
+            if subprop is None:
+                ok = False
+            else:
+                ok = subprop._has_value(subent, rest[1:])
+
+        return ok
+
+    def _check_property(self, rest=None, require_indexed=True):
+        """Override for Property._check_property().
+
+        Raises:
+            InvalidPropertyError: If no subproperty is specified or if
+                something is wrong with the subproperty.
+        """
+        if not rest:
+            raise InvalidPropertyError(
+                "Structured property %s requires a subproperty" % self._name
+            )
+        self._model_class._check_properties([rest], require_indexed=require_indexed)
+
+    def _to_base_type(self, value):
+        """Convert a value to the "base" value type for this property.
+
+        Args:
+            value: The given class value to be converted.
+
+        Returns:
+            ~google.cloud.datastore.Entity: The sub-model instance, converted
+                to a Datastore entity (without a key).
+
+        Raises:
+            TypeError: If ``value`` is not the correct ``Model`` type.
+        """
+        if not isinstance(value, self._model_class):
+            raise TypeError(
+                "Cannot convert to protocol buffer. Expected {} value; "
+                "received {}".format(self._model_class.__name__, value)
+            )
+        return _entity_to_ds_entity(value, set_key=False)
+
+    def _from_base_type(self, value):
+        """Convert a value from the "base" value type for this property.
+
+        Args:
+            value (~google.cloud.datastore.Entity or bytes): The value to be
+                converted.
+
+        Returns:
+            The converted value, an instance of the property's model class.
+        """
+        if isinstance(value, ds_entity_module.Entity):
+            value = _entity_from_ds_entity(value, model_class=self._model_class)
+            return value
+
+    def _get_value_size(self, entity):
+        values = self._retrieve_value(entity, self._default)
+        if values is None:
+            return 0
+        if not isinstance(values, list):
+            values = [values]
+        return len(values)
+
+    def _to_datastore(self, entity, data, prefix="", repeated=False):
+        """Override of :meth:`Property._to_datastore`.
+
+        If ``legacy_data`` is ``True``, then we need to override the default
+        behavior to store everything in a single Datastore entity that uses
+        dotted attribute names, rather than nesting entities.
+        """
+        # Avoid Python 2.7 circular import
+        from google.cloud.ndb import context as context_module
+
+        context = context_module.get_context()
+
+        # The easy way
+        if not context.legacy_data:
+            return super(StructuredProperty, self)._to_datastore(
+                entity, data, prefix=prefix, repeated=repeated
+            )
+
+        # The hard way
+        next_prefix = prefix + self._name + "."
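+        # With legacy_data, sub-entities are flattened into the parent entity
+        # under dotted names: e.g. a hypothetical ``address`` sub-entity with
+        # ``street`` and ``city`` becomes top-level "address.street" and
+        # "address.city" values rather than a nested entity.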
+ next_repeated = repeated or self._repeated + keys = [] + + values = self._get_user_value(entity) + if not self._repeated: + values = (values,) + + if values: + props = tuple(_properties_of(*values)) + + for value in values: + if value is None: + keys.extend( + super(StructuredProperty, self)._to_datastore( + entity, data, prefix=prefix, repeated=repeated + ) + ) + continue + + for prop in props: + keys.extend( + prop._to_datastore( + value, data, prefix=next_prefix, repeated=next_repeated + ) + ) + + return set(keys) + + def _prepare_for_put(self, entity): + values = self._get_user_value(entity) + if not self._repeated: + values = [values] + for value in values: + if value is not None: + value._prepare_for_put() + + +class LocalStructuredProperty(BlobProperty): + """A property that contains ndb.Model value. + + .. note:: + Unlike most property types, a :class:`LocalStructuredProperty` + is **not** indexed. + .. automethod:: _to_base_type + .. automethod:: _from_base_type + .. automethod:: _validate + + Args: + model_class (type): The class of the property. (Must be subclass of + ``ndb.Model``.) + name (str): The name of the property. + compressed (bool): Indicates if the value should be compressed (via + ``zlib``). + repeated (bool): Indicates if this property is repeated, i.e. contains + multiple values. + required (bool): Indicates if this property is required on the given + model type. + default (Any): The default value for this property. + validator (Callable[[~google.cloud.ndb.model.Property, Any], bool]): A + validator to be used to check values. + verbose_name (str): A longer, user-friendly name for this property. + write_empty_list (bool): Indicates if an empty list should be written + to the datastore. + """ + + _model_class = None + _keep_keys = False + _kwargs = None + + def __init__(self, model_class, **kwargs): + indexed = kwargs.pop("indexed", False) + if indexed: + raise NotImplementedError( + "Cannot index LocalStructuredProperty {}.".format(self._name) + ) + keep_keys = kwargs.pop("keep_keys", False) + super(LocalStructuredProperty, self).__init__(**kwargs) + self._model_class = model_class + self._keep_keys = keep_keys + + def _validate(self, value): + """Validate a ``value`` before setting it. + Args: + value: The value to check. + Raises: + exceptions.BadValueError: If ``value`` is not a given class. + """ + if isinstance(value, dict): + # A dict is assumed to be the result of a _to_dict() call. + return self._model_class(**value) + + if not isinstance(value, self._model_class): + raise exceptions.BadValueError( + "In field {}, expected {}, got {!r}".format( + self._name, self._model_class.__name__, value + ) + ) + + def _get_for_dict(self, entity): + value = self._get_value(entity) + if self._repeated: + value = [v._to_dict() for v in value] + elif value is not None: + value = value._to_dict() + return value + + def _to_base_type(self, value): + """Convert a value to the "base" value type for this property. + Args: + value: The given class value to be converted. + Returns: + bytes + Raises: + TypeError: If ``value`` is not the correct ``Model`` type. + """ + if not isinstance(value, self._model_class): + raise TypeError( + "Cannot convert to bytes expected {} value; " + "received {}".format(self._model_class.__name__, value) + ) + return _entity_to_protobuf( + value, set_key=self._keep_keys + )._pb.SerializePartialToString() + + def _from_base_type(self, value): + """Convert a value from the "base" value type for this property. 
+ Args: + value(~google.cloud.datastore.Entity or bytes): The value to be + converted. + Returns: + The converted value with given class. + """ + if isinstance(value, bytes): + pb = entity_pb2.Entity() + pb._pb.MergeFromString(value) + entity_value = helpers.entity_from_protobuf(pb) + if not entity_value.keys(): + # No properties. Maybe dealing with legacy pb format. + from google.cloud.ndb._legacy_entity_pb import EntityProto + + pb = EntityProto() + pb.MergePartialFromString(value) + entity_value.update(pb.entity_props()) + value = entity_value + if not self._keep_keys and value.key: + value.key = None + model_class = self._model_class + kind = self._model_class.__name__ + if "class" in value and value["class"]: + kind = value["class"][-1] or model_class + if kind != self._model_class.__name__: + # if this is a polymodel, find correct subclass. + model_class = Model._lookup_model(kind) + return _entity_from_ds_entity(value, model_class=model_class) + + def _prepare_for_put(self, entity): + values = self._get_user_value(entity) + if not self._repeated: + values = [values] + for value in values: + if value is not None: + value._prepare_for_put() + + def _to_datastore(self, entity, data, prefix="", repeated=False): + """Override of :method:`Property._to_datastore`. + + Although this property's entities should be stored as serialized + strings, when stored using old NDB they appear as unserialized + entities in the datastore. When serialized as strings in this class, + they can't be read by old NDB either. To avoid these incompatibilities, + we store them as entities when legacy_data is set to True, which is the + default behavior. + """ + # Avoid Python 2.7 circular import + from google.cloud.ndb import context as context_module + + context = context_module.get_context() + + keys = super(LocalStructuredProperty, self)._to_datastore( + entity, data, prefix=prefix, repeated=repeated + ) + + if context.legacy_data: + values = self._get_user_value(entity) + if not self._repeated: + values = [values] + legacy_values = [] + for value in values: + ds_entity = None + if value is not None: + ds_entity = _entity_to_ds_entity(value, set_key=self._keep_keys) + legacy_values.append(ds_entity) + if not self._repeated: + legacy_values = legacy_values[0] + data[self._name] = legacy_values + + return keys + + +class GenericProperty(Property): + """A Property whose value can be (almost) any basic type. + This is mainly used for Expando and for orphans (values present in + Cloud Datastore but not represented in the Model subclass) but can + also be used explicitly for properties with dynamically-typed + values. + + This supports compressed=True, which is only effective for str + values (not for unicode), and implies indexed=False. + """ + + _compressed = False + _kwargs = None + + def __init__(self, name=None, compressed=False, **kwargs): + if compressed: # Compressed implies unindexed. + kwargs.setdefault("indexed", False) + super(GenericProperty, self).__init__(name=name, **kwargs) + self._compressed = compressed + if compressed and self._indexed: + raise NotImplementedError( + "GenericProperty %s cannot be compressed and " + "indexed at the same time." 
% self._name + ) + + def _to_base_type(self, value): + if self._compressed and isinstance(value, bytes): + return _CompressedValue(zlib.compress(value)) + + def _from_base_type(self, value): + if isinstance(value, _CompressedValue): + return zlib.decompress(value.z_val) + + def _validate(self, value): + if self._indexed: + if isinstance(value, bytes) and len(value) > _MAX_STRING_LENGTH: + raise exceptions.BadValueError( + "Indexed value %s must be at most %d bytes" + % (self._name, _MAX_STRING_LENGTH) + ) + + +class ComputedProperty(GenericProperty): + """A Property whose value is determined by a user-supplied function. + Computed properties cannot be set directly, but are instead generated by a + function when required. They are useful to provide fields in Cloud + Datastore that can be used for filtering or sorting without having to + manually set the value in code - for example, sorting on the length of a + BlobProperty, or using an equality filter to check if another field is not + empty. ComputedProperty can be declared as a regular property, passing a + function as the first argument, or it can be used as a decorator for the + function that does the calculation. + + Example: + + >>> class DatastoreFile(ndb.Model): + ... name = ndb.model.StringProperty() + ... n_lower = ndb.model.ComputedProperty(lambda self: self.name.lower()) + ... + ... data = ndb.model.BlobProperty() + ... + ... @ndb.model.ComputedProperty + ... def size(self): + ... return len(self.data) + ... + ... def _compute_hash(self): + ... return hashlib.sha1(self.data).hexdigest() + ... hash = ndb.model.ComputedProperty(_compute_hash, name='sha1') + """ + + _kwargs = None + _func = None + + def __init__(self, func, name=None, indexed=None, repeated=None, verbose_name=None): + """Constructor. + + Args: + + func: A function that takes one argument, the model instance, and + returns a calculated value. + """ + super(ComputedProperty, self).__init__( + name=name, + indexed=indexed, + repeated=repeated, + verbose_name=verbose_name, + ) + self._func = func + + def _set_value(self, entity, value): + raise ComputedPropertyError("Cannot assign to a ComputedProperty") + + def _delete_value(self, entity): + raise ComputedPropertyError("Cannot delete a ComputedProperty") + + def _get_value(self, entity): + # About projections and computed properties: if the computed + # property itself is in the projection, don't recompute it; this + # prevents raising UnprojectedPropertyError if one of the + # dependents is not in the projection. However, if the computed + # property is not in the projection, compute it normally -- its + # dependents may all be in the projection, and it may be useful to + # access the computed value without having it in the projection. + # In this case, if any of the dependents is not in the projection, + # accessing it in the computation function will raise + # UnprojectedPropertyError which will just bubble up. + if entity._projection and self._name in entity._projection: + return super(ComputedProperty, self)._get_value(entity) + value = self._func(entity) + self._store_value(entity, value) + return value + + def _prepare_for_put(self, entity): + self._get_value(entity) # For its side effects. + + +class MetaModel(type): + """Metaclass for Model. + + This exists to fix up the properties -- they need to know their name. For + example, defining a model: + + .. code-block:: python + + class Book(ndb.Model): + pages = ndb.IntegerProperty() + + the ``Book.pages`` property doesn't have the name ``pages`` assigned. 
+ This is accomplished by calling the ``_fix_up_properties()`` method on the + class itself. + """ + + def __init__(cls, name, bases, classdict): + super(MetaModel, cls).__init__(name, bases, classdict) + cls._fix_up_properties() + + def __repr__(cls): + props = [] + for _, prop in sorted(cls._properties.items()): + props.append("{}={!r}".format(prop._code_name, prop)) + return "{}<{}>".format(cls.__name__, ", ".join(props)) + + +class Model(_NotEqualMixin, metaclass=MetaModel): + """A class describing Cloud Datastore entities. + + Model instances are usually called entities. All model classes + inheriting from :class:`Model` automatically have :class:`MetaModel` as + their metaclass, so that the properties are fixed up properly after the + class is defined. + + Because of this, you cannot use the same :class:`Property` object to + describe multiple properties -- you must create separate :class:`Property` + objects for each property. For example, this does not work: + + .. code-block:: python + + reuse_prop = ndb.StringProperty() + + class Wrong(ndb.Model): + first = reuse_prop + second = reuse_prop + + instead each class attribute needs to be distinct: + + .. code-block:: python + + class NotWrong(ndb.Model): + first = ndb.StringProperty() + second = ndb.StringProperty() + + The "kind" for a given :class:`Model` subclass is normally equal to the + class name (exclusive of the module name or any other parent scope). To + override the kind, define :meth:`_get_kind`, as follows: + + .. code-block:: python + + class MyModel(ndb.Model): + @classmethod + def _get_kind(cls): + return "AnotherKind" + + A newly constructed entity will not be persisted to Cloud Datastore without + an explicit call to :meth:`put`. + + User-defined properties can be passed to the constructor via keyword + arguments: + + .. doctest:: model-keywords + + >>> class MyModel(ndb.Model): + ... value = ndb.FloatProperty() + ... description = ndb.StringProperty() + ... + >>> MyModel(value=7.34e22, description="Mass of the moon") + MyModel(description='Mass of the moon', value=7.34e+22) + + In addition to user-defined properties, there are seven accepted keyword + arguments: + + * ``key`` + * ``id`` + * ``app`` + * ``namespace`` + * ``database`` + * ``parent`` + * ``projection`` + + Of these, ``key`` is a public attribute on :class:`Model` instances: + + .. testsetup:: model-key + + from google.cloud import ndb + + + class MyModel(ndb.Model): + value = ndb.FloatProperty() + description = ndb.StringProperty() + + .. doctest:: model-key + + >>> entity1 = MyModel(id=11) + >>> entity1.key + Key('MyModel', 11) + >>> entity2 = MyModel(parent=entity1.key) + >>> entity2.key + Key('MyModel', 11, 'MyModel', None) + >>> entity3 = MyModel(key=ndb.Key(MyModel, "e-three")) + >>> entity3.key + Key('MyModel', 'e-three') + + However, a user-defined property can be defined on the model with the + same name as one of those keyword arguments. In this case, the user-defined + property "wins": + + .. doctest:: model-keyword-id-collision + + >>> class IDCollide(ndb.Model): + ... id = ndb.FloatProperty() + ... + >>> entity = IDCollide(id=17) + >>> entity + IDCollide(id=17.0) + >>> entity.key is None + True + + In such cases of argument "collision", an underscore can be used as a + keyword argument prefix: + + .. 
doctest:: model-keyword-id-collision + + >>> entity = IDCollide(id=17, _id=2009) + >>> entity + IDCollide(key=Key('IDCollide', 2009), id=17.0) + + For the **very** special case of a property named ``key``, the ``key`` + attribute will no longer be the entity's key but instead will be the + property value. Instead, the entity's key is accessible via ``_key``: + + .. doctest:: model-keyword-key-collision + + >>> class KeyCollide(ndb.Model): + ... key = ndb.StringProperty() + ... + >>> entity1 = KeyCollide(key="Take fork in road", id=987) + >>> entity1 + KeyCollide(_key=Key('KeyCollide', 987), key='Take fork in road') + >>> entity1.key + 'Take fork in road' + >>> entity1._key + Key('KeyCollide', 987) + >>> + >>> entity2 = KeyCollide(key="Go slow", _key=ndb.Key(KeyCollide, 1)) + >>> entity2 + KeyCollide(_key=Key('KeyCollide', 1), key='Go slow') + + The constructor accepts keyword arguments based on the properties + defined on model subclass. However, using keywords for nonexistent + or non-:class:`Property` class attributes will cause a failure: + + .. doctest:: model-keywords-fail + + >>> class Simple(ndb.Model): + ... marker = 1001 + ... some_name = ndb.StringProperty() + ... + >>> Simple(some_name="Value set here.") + Simple(some_name='Value set here.') + >>> Simple(some_name="Value set here.", marker=29) + Traceback (most recent call last): + ... + TypeError: Cannot set non-property marker + >>> Simple(some_name="Value set here.", missing=29) + Traceback (most recent call last): + ... + AttributeError: type object 'Simple' has no attribute 'missing' + + .. automethod:: _get_kind + + Args: + key (Key): Datastore key for this entity (kind must match this model). + If ``key`` is used, ``id`` and ``parent`` must be unset or + :data:`None`. + id (str): Key ID for this model. If ``id`` is used, ``key`` must be + :data:`None`. + parent (Key): The parent model or :data:`None` for a top-level model. + If ``parent`` is used, ``key`` must be :data:`None`. + namespace (str): Namespace for the entity key. + project (str): Project ID for the entity key. + app (str): DEPRECATED: Synonym for ``project``. + database (str): Database for the entity key. + kwargs (Dict[str, Any]): Additional keyword arguments. These should map + to properties of this model. + + Raises: + exceptions.BadArgumentError: If the constructor is called with ``key`` and one + of ``id``, ``app``, ``namespace``, ``database``, or ``parent`` specified. + """ + + # Class variables updated by _fix_up_properties() + _properties = None + _has_repeated = False + _kind_map = {} # Dict mapping {kind: Model subclass} + + # Defaults for instance variables. + _entity_key = None + _values = None + _projection = () # Tuple of names of projected properties. + + # Hardcoded pseudo-property for the key. + _key = ModelKey() + key = _key + """A special pseudo-property for key queries. + + For example: + + .. code-block:: python + + key = ndb.Key(MyModel, 808) + query = MyModel.query(MyModel.key > key) + + will create a query for the reserved ``__key__`` property. + """ + + def __setstate__(self, state): + if type(state) is dict: + # this is not a legacy pb. set __dict__ + self.__init__() + self.__dict__.update(state) + else: + # this is a legacy pickled object. We need to deserialize. 
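+            # ``state`` is the serialized ``EntityProto`` byte string that
+            # legacy GAE ndb produced when pickling an entity.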
+ pb = _legacy_entity_pb.EntityProto() + pb.MergePartialFromString(state) + self.__init__() + self.__class__._from_pb(pb, set_key=False, ent=self) + + def __init__(_self, **kwargs): + # NOTE: We use ``_self`` rather than ``self`` so users can define a + # property named 'self'. + self = _self + key = self._get_arg(kwargs, "key") + id_ = self._get_arg(kwargs, "id") + project = self._get_arg(kwargs, "project") + app = self._get_arg(kwargs, "app") + database = self._get_arg(kwargs, "database", key_module.UNDEFINED) + namespace = self._get_arg(kwargs, "namespace", key_module.UNDEFINED) + parent = self._get_arg(kwargs, "parent") + projection = self._get_arg(kwargs, "projection") + + if app and project: + raise exceptions.BadArgumentError( + "Can't specify both 'app' and 'project'. They are synonyms." + ) + + if not project: + project = app + + key_parts_unspecified = ( + id_ is None + and parent is None + and project is None + and database is key_module.UNDEFINED + and namespace is key_module.UNDEFINED + ) + if key is not None: + if not key_parts_unspecified: + raise exceptions.BadArgumentError( + "Model constructor given 'key' does not accept " + "'id', 'project', 'app', 'namespace', 'database', or 'parent'." + ) + self._key = _validate_key(key, entity=self) + elif not key_parts_unspecified: + self._key = Key( + self._get_kind(), + id_, + parent=parent, + project=project, + database=database, + namespace=namespace, + ) + + self._values = {} + self._set_attributes(kwargs) + # Set the projection last, otherwise it will prevent _set_attributes(). + if projection: + self._set_projection(projection) + + def _get_property_for(self, p, indexed=True, depth=0): + """Internal helper to get the Property for a protobuf-level property.""" + if isinstance(p.name(), str): + p.set_name(bytes(p.name(), encoding="utf-8")) + parts = p.name().decode().split(".") + if len(parts) <= depth: + # Apparently there's an unstructured value here. + # Assume it is a None written for a missing value. + # (It could also be that a schema change turned an unstructured + # value into a structured one. In that case, too, it seems + # better to return None than to return an unstructured value, + # since the latter doesn't match the current schema.) + return None + next = parts[depth] + prop = self._properties.get(next) + if prop is None: + prop = self._fake_property(p, next, indexed) + return prop + + def _clone_properties(self): + """Relocate ``_properties`` from class to instance. + + Internal helper, in case properties need to be modified for an instance but not + the class. + """ + cls = type(self) + if self._properties is cls._properties: + self._properties = dict(cls._properties) + + def _fake_property(self, p, next, indexed=True): + """Internal helper to create a fake Property. Ported from legacy datastore""" + # A custom 'meaning' for compressed properties. + _MEANING_URI_COMPRESSED = "ZLIB" + self._clone_properties() + if p.name() != next.encode("utf-8") and not p.name().endswith( + b"." 
+ next.encode("utf-8") + ): + prop = StructuredProperty(Expando, next) + prop._store_value(self, _BaseValue(Expando())) + else: + compressed = p.meaning_uri() == _MEANING_URI_COMPRESSED + prop = GenericProperty( + next, repeated=p.multiple(), indexed=indexed, compressed=compressed + ) + prop._code_name = next + self._properties[prop._name] = prop + return prop + + @classmethod + def _from_pb(cls, pb, set_key=True, ent=None, key=None): + """Internal helper, ported from GoogleCloudPlatform/datastore-ndb-python, + to create an entity from an EntityProto protobuf.""" + if not isinstance(pb, _legacy_entity_pb.EntityProto): + raise TypeError("pb must be a EntityProto; received %r" % pb) + if ent is None: + ent = cls() + + # A key passed in overrides a key in the pb. + if key is None and pb.key().path.element_size(): + # modern NDB expects strings. + if not isinstance(pb.key_.app_, str): # pragma: NO BRANCH + pb.key_.app_ = pb.key_.app_.decode() + if not isinstance(pb.key_.name_space_, str): # pragma: NO BRANCH + pb.key_.name_space_ = pb.key_.name_space_.decode() + + key = Key(reference=pb.key()) + # If set_key is not set, skip a trivial incomplete key. + if key is not None and (set_key or key.id() or key.parent()): + ent._key = key + + # NOTE(darke): Keep a map from (indexed, property name) to the property. + # This allows us to skip the (relatively) expensive call to + # _get_property_for for repeated fields. + _property_map = {} + projection = [] + for indexed, plist in ( + (True, pb.property_list()), + # (False, pb.raw_property_list()), + (False, pb.property_list()), + ): + for p in plist: + if p.meaning() == _legacy_entity_pb.Property.INDEX_VALUE: + projection.append(p.name().decode()) + property_map_key = (p.name(), indexed) + _property_map[property_map_key] = ent._get_property_for(p, indexed) + _property_map[property_map_key]._legacy_deserialize(ent, p) + + ent._set_projection(projection) + return ent + + @classmethod + def _get_arg(cls, kwargs, keyword, default=None): + """Parse keywords for fields that aren't user-defined properties. + + This is used to re-map special keyword arguments in the presence + of name collision. For example if ``id`` is a property on the current + :class:`Model`, then it may be desirable to pass ``_id`` (instead of + ``id``) to the constructor. + + If the argument is found as ``_{keyword}`` or ``{keyword}``, it will + be removed from ``kwargs``. + + Args: + kwargs (Dict[str, Any]): A keyword arguments dictionary. + keyword (str): A keyword to be converted. + default (Any): Returned if argument isn't found. + + Returns: + Optional[Any]: The ``keyword`` argument, if found, otherwise + ``default``. + """ + alt_keyword = "_" + keyword + if alt_keyword in kwargs: + return kwargs.pop(alt_keyword) + + if keyword in kwargs: + obj = getattr(cls, keyword, None) + if not isinstance(obj, Property) or isinstance(obj, ModelKey): + return kwargs.pop(keyword) + + return default + + def _set_attributes(self, kwargs): + """Set attributes from keyword arguments. + + Args: + kwargs (Dict[str, Any]): A keyword arguments dictionary. + """ + cls = type(self) + for name, value in kwargs.items(): + # NOTE: This raises an ``AttributeError`` for unknown properties + # and that is the intended behavior. 
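+            # E.g. for the ``Simple`` model in the class docstring above,
+            # ``Simple(missing=29)`` raises AttributeError on the next line,
+            # while ``Simple(marker=29)`` reaches the TypeError below, since
+            # ``marker`` is a plain class attribute rather than a Property.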
+ prop = getattr(cls, name) + if not isinstance(prop, Property): + raise TypeError("Cannot set non-property {}".format(name)) + prop._set_value(self, value) + + def __repr__(self): + """Return an unambiguous string representation of an entity.""" + by_args = [] + has_key_property = False + for prop in self._properties.values(): + if prop._code_name == "key": + has_key_property = True + + if not prop._has_value(self): + continue + + value = prop._retrieve_value(self) + if value is None: + arg_repr = "None" + elif prop._repeated: + arg_reprs = [prop._value_to_repr(sub_value) for sub_value in value] + arg_repr = "[{}]".format(", ".join(arg_reprs)) + else: + arg_repr = prop._value_to_repr(value) + + by_args.append("{}={}".format(prop._code_name, arg_repr)) + + by_args.sort() + + if self._key is not None: + if has_key_property: + entity_key_name = "_key" + else: + entity_key_name = "key" + by_args.insert(0, "{}={!r}".format(entity_key_name, self._key)) + + if self._projection: + by_args.append("_projection={!r}".format(self._projection)) + + return "{}({})".format(type(self).__name__, ", ".join(by_args)) + + @classmethod + def _get_kind(cls): + """str: Return the kind name for this class. + + This defaults to ``cls.__name__``; users may override this to give a + class a different name when stored in Google Cloud Datastore than the + name of the class. + """ + return cls.__name__ + + @classmethod + def _class_name(cls): + """A hook for PolyModel to override. + + For regular models and expandos this is just an alias for + _get_kind(). For PolyModel subclasses, it returns the class name + (as set in the 'class' attribute thereof), whereas _get_kind() + returns the kind (the class name of the root class of a specific + PolyModel hierarchy). + """ + return cls._get_kind() + + @classmethod + def _default_filters(cls): + """Return an iterable of filters that are always to be applied. + + This is used by PolyModel to quietly insert a filter for the + current class name. + """ + return () + + def __hash__(self): + """Not implemented hash function. + + Raises: + TypeError: Always, to emphasize that entities are mutable. + """ + raise TypeError("Model is mutable, so cannot be hashed.") + + def __eq__(self, other): + """Compare two entities of the same class for equality.""" + if type(other) is not type(self): + return NotImplemented + + if self._key != other._key: + return False + + return self._equivalent(other) + + def _equivalent(self, other): + """Compare two entities of the same class, excluding keys. + + Args: + other (Model): An entity of the same class. It is assumed that + the type and the key of ``other`` match the current entity's + type and key (and the caller is responsible for checking). + + Returns: + bool: Indicating if the current entity and ``other`` are + equivalent. + """ + if set(self._projection) != set(other._projection): + return False + + if len(self._properties) != len(other._properties): + return False # Can only happen for Expandos. + + prop_names = set(self._properties.keys()) + other_prop_names = set(other._properties.keys()) + if prop_names != other_prop_names: + return False # Again, only possible for Expandos + + # Restrict properties to the projection if set. 
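+        # E.g. two entities loaded by a projection query on ("name",) are
+        # compared only on their "name" values here.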
+ if self._projection: + prop_names = set(self._projection) + + for name in prop_names: + value = self._properties[name]._get_value(self) + if value != other._properties[name]._get_value(other): + return False + + return True + + def __lt__(self, value): + """The ``<`` comparison is not well-defined.""" + raise TypeError("Model instances are not orderable.") + + def __le__(self, value): + """The ``<=`` comparison is not well-defined.""" + raise TypeError("Model instances are not orderable.") + + def __gt__(self, value): + """The ``>`` comparison is not well-defined.""" + raise TypeError("Model instances are not orderable.") + + def __ge__(self, value): + """The ``>=`` comparison is not well-defined.""" + raise TypeError("Model instances are not orderable.") + + @classmethod + def _lookup_model(cls, kind, default_model=None): + """Get the model class for the given kind. + + Args: + kind (str): The name of the kind to look up. + default_model (Optional[type]): The model class to return if the + kind can't be found. + + Returns: + type: The model class for the requested kind or the default model. + + Raises: + .KindError: If the kind was not found and no ``default_model`` was + provided. + """ + model_class = cls._kind_map.get(kind, default_model) + if model_class is None: + raise KindError( + ( + "No model class found for the kind '{}'. Did you forget " + "to import it?" + ).format(kind) + ) + return model_class + + def _set_projection(self, projection): + """Set the projected properties for this instance. + + Args: + projection (Union[list, tuple]): An iterable of strings + representing the projection for the model instance. + """ + self._projection = tuple(projection) + + # Handle projections for structured properties by recursively setting + # projections on sub-entities. + by_prefix = {} + for name in projection: + if "." in name: + head, tail = name.split(".", 1) + by_prefix.setdefault(head, []).append(tail) + + for name, projection in by_prefix.items(): + prop = self._properties.get(name) + value = prop._get_user_value(self) + if prop._repeated: + for entity in value: + entity._set_projection(projection) + else: + value._set_projection(projection) + + @classmethod + def _check_properties(cls, property_names, require_indexed=True): + """Internal helper to check the given properties exist and meet + specified requirements. + + Called from query.py. + + Args: + property_names (list): List or tuple of property names -- each + being a string, possibly containing dots (to address subproperties + of structured properties). + + Raises: + InvalidPropertyError: if one of the properties is invalid. + AssertionError: if the argument is not a list or tuple of strings. + """ + assert isinstance(property_names, (list, tuple)), repr(property_names) + for name in property_names: + if "." in name: + name, rest = name.split(".", 1) + else: + rest = None + prop = cls._properties.get(name) + if prop is None: + raise InvalidPropertyError("Unknown property {}".format(name)) + else: + prop._check_property(rest, require_indexed=require_indexed) + + @classmethod + def _fix_up_properties(cls): + """Fix up the properties by calling their ``_fix_up()`` method. + + .. note:: + + This is called by :class:`MetaModel`, but may also be called + manually after dynamically updating a model class. + + Raises: + KindError: If the returned kind from ``_get_kind()`` is not a + :class:`str`. + TypeError: If a property on this model has a name beginning with + an underscore. 
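+
+        For example, after adding a property to an already-defined model
+        class at runtime (a hypothetical ``MyModel``), the fix-up can be
+        re-run manually:
+
+        .. code-block:: python
+
+            MyModel.extra = ndb.StringProperty()
+            MyModel._fix_up_properties()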
+ """ + kind = cls._get_kind() + if not isinstance(kind, str): + raise KindError( + "Class {} defines a ``_get_kind()`` method that returns " + "a non-string ({!r})".format(cls.__name__, kind) + ) + + cls._properties = {} + + # Skip the classes in ``ndb.model``. + if cls.__module__ == __name__: + return + + for name in dir(cls): + attr = getattr(cls, name, None) + if isinstance(attr, ModelAttribute) and not isinstance(attr, ModelKey): + if name.startswith("_"): + raise TypeError( + "ModelAttribute {} cannot begin with an underscore " + "character. ``_`` prefixed attributes are reserved " + "for temporary Model instance values.".format(name) + ) + attr._fix_up(cls, name) + if isinstance(attr, Property): + if attr._repeated or ( + isinstance(attr, StructuredProperty) + and attr._model_class._has_repeated + ): + cls._has_repeated = True + cls._properties[attr._name] = attr + + cls._update_kind_map() + + @classmethod + def _update_kind_map(cls): + """Update the kind map to include this class.""" + cls._kind_map[cls._get_kind()] = cls + + @staticmethod + def _validate_key(key): + """Validation for ``_key`` attribute (designed to be overridden). + + Args: + key (~google.cloud.ndb.key.Key): Proposed key to use for this entity. + + Returns: + ~google.cloud.ndb.key.Key: The validated ``key``. + """ + return key + + @classmethod + def _gql(cls, query_string, *args, **kwargs): + """Run a GQL query using this model as the FROM entity. + + Args: + query_string (str): The WHERE part of a GQL query (including the + WHERE keyword). + args: if present, used to call bind() on the query. + kwargs: if present, used to call bind() on the query. + + Returns: + :class:query.Query: A query instance. + """ + # import late to avoid circular import problems + from google.cloud.ndb import query + + gql = "SELECT * FROM {} {}".format(cls._class_name(), query_string) + return query.gql(gql, *args, **kwargs) + + gql = _gql + + @options_module.Options.options + @utils.keyword_only( + retries=None, + timeout=None, + deadline=None, + use_cache=None, + use_global_cache=None, + global_cache_timeout=None, + use_datastore=None, + use_memcache=None, + memcache_timeout=None, + max_memcache_items=None, + force_writes=None, + _options=None, + ) + @utils.positional(1) + def _put(self, **kwargs): + """Synchronously write this entity to Cloud Datastore. + + If the operation creates or completes a key, the entity's key + attribute is set to the new, complete key. + + Args: + retries (int): Number of times to retry this operation in the case + of transient server errors. Operation will potentially be tried + up to ``retries`` + 1 times. Set to ``0`` to try operation only + once, with no retries. + timeout (float): Override the gRPC timeout, in seconds. + deadline (float): DEPRECATED: Synonym for ``timeout``. + use_cache (bool): Specifies whether to store entities in in-process + cache; overrides in-process cache policy for this operation. + use_global_cache (bool): Specifies whether to store entities in + global cache; overrides global cache policy for this operation. + use_datastore (bool): Specifies whether to store entities in + Datastore; overrides Datastore policy for this operation. + global_cache_timeout (int): Maximum lifetime for entities in global + cache; overrides global cache timeout policy for this + operation. + use_memcache (bool): DEPRECATED: Synonym for ``use_global_cache``. + memcache_timeout (int): DEPRECATED: Synonym for + ``global_cache_timeout``. + max_memcache_items (int): No longer supported. 
+ force_writes (bool): No longer supported. + + Returns: + key.Key: The key for the entity. This is always a complete key. + """ + return self._put_async(_options=kwargs["_options"]).result() + + put = _put + + @options_module.Options.options + @utils.keyword_only( + retries=None, + timeout=None, + deadline=None, + use_cache=None, + use_global_cache=None, + global_cache_timeout=None, + use_datastore=None, + use_memcache=None, + memcache_timeout=None, + max_memcache_items=None, + force_writes=None, + _options=None, + ) + @utils.positional(1) + def _put_async(self, **kwargs): + """Asynchronously write this entity to Cloud Datastore. + + If the operation creates or completes a key, the entity's key + attribute is set to the new, complete key. + + Args: + retries (int): Number of times to retry this operation in the case + of transient server errors. Operation will potentially be tried + up to ``retries`` + 1 times. Set to ``0`` to try operation only + once, with no retries. + timeout (float): Override the gRPC timeout, in seconds. + deadline (float): DEPRECATED: Synonym for ``timeout``. + use_cache (bool): Specifies whether to store entities in in-process + cache; overrides in-process cache policy for this operation. + use_global_cache (bool): Specifies whether to store entities in + global cache; overrides global cache policy for this operation. + use_datastore (bool): Specifies whether to store entities in + Datastore; overrides Datastore policy for this operation. + global_cache_timeout (int): Maximum lifetime for entities in global + cache; overrides global cache timeout policy for this + operation. + use_memcache (bool): DEPRECATED: Synonym for ``use_global_cache``. + memcache_timeout (int): DEPRECATED: Synonym for + ``global_cache_timeout``. + max_memcache_items (int): No longer supported. + force_writes (bool): No longer supported. + + Returns: + tasklets.Future: The eventual result will be the key for the + entity. This is always a complete key. + """ + # Avoid Python 2.7 circular import + from google.cloud.ndb import context as context_module + from google.cloud.ndb import _datastore_api + + self._pre_put_hook() + + @tasklets.tasklet + def put(self): + ds_entity = _entity_to_ds_entity(self) + ds_key = yield _datastore_api.put(ds_entity, kwargs["_options"]) + if ds_key: + self._key = key_module.Key._from_ds_key(ds_key) + + context = context_module.get_context() + if context._use_cache(self._key, kwargs["_options"]): + context.cache[self._key] = self + + raise tasklets.Return(self._key) + + self._prepare_for_put() + future = put(self) + future.add_done_callback(self._post_put_hook) + return future + + put_async = _put_async + + def _prepare_for_put(self): + if self._properties: + for prop in self._properties.values(): + prop._prepare_for_put(self) + + @classmethod + @utils.keyword_only( + distinct=False, + ancestor=None, + order_by=None, + orders=None, + project=None, + app=None, + namespace=None, + projection=None, + distinct_on=None, + group_by=None, + default_options=None, + ) + def _query(cls, *filters, **kwargs): + """Generate a query for this class. + + Args: + *filters (query.FilterNode): Filters to apply to this query. + distinct (Optional[bool]): Setting this to :data:`True` is + shorthand for setting `distinct_on` to `projection`. + ancestor (key.Key): Entities returned will be descendants of + `ancestor`. + order_by (list[Union[str, google.cloud.ndb.model.Property]]): + The model properties used to order query results. 
+            orders (list[Union[str, google.cloud.ndb.model.Property]]):
+                Deprecated. Synonym for `order_by`.
+            project (str): The project to perform the query in. Also known as
+                the app, in Google App Engine. If not passed, uses the
+                client's value.
+            app (str): Deprecated. Synonym for `project`.
+            namespace (str): The namespace to which to restrict results.
+                If not passed, uses the client's value.
+            projection (list[str]): The fields to return as part of the
+                query results.
+            distinct_on (list[str]): The field names used to group query
+                results.
+            group_by (list[str]): Deprecated. Synonym for distinct_on.
+            default_options (QueryOptions): QueryOptions object.
+        """
+        # Validate `distinct`.
+        if kwargs["distinct"]:
+            if kwargs["distinct_on"]:
+                raise TypeError("Cannot use `distinct` and `distinct_on` together.")
+
+            if kwargs["group_by"]:
+                raise TypeError("Cannot use `distinct` and `group_by` together.")
+
+            if not kwargs["projection"]:
+                raise TypeError("Cannot use `distinct` without `projection`.")
+
+            kwargs["distinct_on"] = kwargs["projection"]
+
+        # Avoid circular import
+        from google.cloud.ndb import query as query_module
+
+        query = query_module.Query(
+            kind=cls._get_kind(),
+            ancestor=kwargs["ancestor"],
+            order_by=kwargs["order_by"],
+            orders=kwargs["orders"],
+            project=kwargs["project"],
+            app=kwargs["app"],
+            namespace=kwargs["namespace"],
+            projection=kwargs["projection"],
+            distinct_on=kwargs["distinct_on"],
+            group_by=kwargs["group_by"],
+            default_options=kwargs["default_options"],
+        )
+        query = query.filter(*cls._default_filters())
+        query = query.filter(*filters)
+        return query
+
+    query = _query
+
+    @classmethod
+    @options_module.Options.options
+    @utils.positional(4)
+    def _allocate_ids(
+        cls,
+        size=None,
+        max=None,
+        parent=None,
+        retries=None,
+        timeout=None,
+        deadline=None,
+        use_cache=None,
+        use_global_cache=None,
+        global_cache_timeout=None,
+        use_datastore=None,
+        use_memcache=None,
+        memcache_timeout=None,
+        max_memcache_items=None,
+        force_writes=None,
+        _options=None,
+    ):
+        """Allocates a range of key IDs for this model class.
+
+        Args:
+            size (int): Number of IDs to allocate. Must be specified.
+            max (int): Maximum ID to allocate. This feature is no longer
+                supported. You must always specify ``size``.
+            parent (key.Key): Parent key for which the IDs will be allocated.
+            retries (int): Number of times to retry this operation in the case
+                of transient server errors. Operation will potentially be tried
+                up to ``retries`` + 1 times. Set to ``0`` to try operation only
+                once, with no retries.
+            timeout (float): Override the gRPC timeout, in seconds.
+            deadline (float): DEPRECATED: Synonym for ``timeout``.
+            use_cache (bool): Specifies whether to store entities in in-process
+                cache; overrides in-process cache policy for this operation.
+            use_global_cache (bool): Specifies whether to store entities in
+                global cache; overrides global cache policy for this operation.
+            use_datastore (bool): Specifies whether to store entities in
+                Datastore; overrides Datastore policy for this operation.
+            global_cache_timeout (int): Maximum lifetime for entities in global
+                cache; overrides global cache timeout policy for this
+                operation.
+            use_memcache (bool): DEPRECATED: Synonym for ``use_global_cache``.
+            memcache_timeout (int): DEPRECATED: Synonym for
+                ``global_cache_timeout``.
+            max_memcache_items (int): No longer supported.
+            force_writes (bool): No longer supported.
+
+        Returns:
+            tuple(key.Key): Keys for the newly allocated IDs.
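+
+        For example, with a hypothetical ``MyModel`` kind (inside an active
+        client context), IDs can be reserved before creating entities:
+
+        .. code-block:: python
+
+            keys = MyModel.allocate_ids(2)
+            entity = MyModel(key=keys[0])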
+ """ + future = cls._allocate_ids_async(size, max, parent, _options=_options) + return future.result() + + allocate_ids = _allocate_ids + + @classmethod + @options_module.Options.options + @utils.positional(4) + def _allocate_ids_async( + cls, + size=None, + max=None, + parent=None, + retries=None, + timeout=None, + deadline=None, + use_cache=None, + use_global_cache=None, + global_cache_timeout=None, + use_datastore=None, + use_memcache=None, + memcache_timeout=None, + max_memcache_items=None, + force_writes=None, + _options=None, + ): + """Allocates a range of key IDs for this model class. + + Args: + size (int): Number of IDs to allocate. Must be specified. + max (int): Maximum ID to allocated. This feature is no longer + supported. You must always specify ``size``. + parent (key.Key): Parent key for which the IDs will be allocated. + retries (int): Number of times to retry this operation in the case + of transient server errors. Operation will potentially be tried + up to ``retries`` + 1 times. Set to ``0`` to try operation only + once, with no retries. + timeout (float): Override the gRPC timeout, in seconds. + deadline (float): DEPRECATED: Synonym for ``timeout``. + use_cache (bool): Specifies whether to store entities in in-process + cache; overrides in-process cache policy for this operation. + use_global_cache (bool): Specifies whether to store entities in + global cache; overrides global cache policy for this operation. + use_datastore (bool): Specifies whether to store entities in + Datastore; overrides Datastore policy for this operation. + global_cache_timeout (int): Maximum lifetime for entities in global + cache; overrides global cache timeout policy for this + operation. + use_memcache (bool): DEPRECATED: Synonym for ``use_global_cache``. + memcache_timeout (int): DEPRECATED: Synonym for + ``global_cache_timeout``. + max_memcache_items (int): No longer supported. + force_writes (bool): No longer supported. + + Returns: + tasklets.Future: Eventual result is ``tuple(key.Key)``: Keys for + the newly allocated IDs. + """ + # Avoid Python 2.7 circular import + from google.cloud.ndb import _datastore_api + + if max: + raise NotImplementedError( + "The 'max' argument to 'allocate_ids' is no longer supported. " + "There is no support for it in the Google Datastore backend " + "service." 
+            )
+
+        if not size:
+            raise TypeError("Must pass non-zero 'size' to 'allocate_ids'")
+
+        @tasklets.tasklet
+        def allocate_ids():
+            cls._pre_allocate_ids_hook(size, max, parent)
+            kind = cls._get_kind()
+            keys = [key_module.Key(kind, None, parent=parent)._key for _ in range(size)]
+            key_pbs = yield _datastore_api.allocate(keys, _options)
+            keys = tuple(
+                (
+                    key_module.Key._from_ds_key(helpers.key_from_protobuf(key_pb))
+                    for key_pb in key_pbs
+                )
+            )
+            raise tasklets.Return(keys)
+
+        future = allocate_ids()
+        future.add_done_callback(
+            functools.partial(cls._post_allocate_ids_hook, size, max, parent)
+        )
+        return future
+
+    allocate_ids_async = _allocate_ids_async
+
+    @classmethod
+    @options_module.ReadOptions.options
+    @utils.positional(6)
+    def _get_by_id(
+        cls,
+        id,
+        parent=None,
+        namespace=None,
+        project=None,
+        app=None,
+        read_consistency=None,
+        read_policy=None,
+        transaction=None,
+        retries=None,
+        timeout=None,
+        deadline=None,
+        use_cache=None,
+        use_global_cache=None,
+        global_cache_timeout=None,
+        use_datastore=None,
+        use_memcache=None,
+        memcache_timeout=None,
+        max_memcache_items=None,
+        force_writes=None,
+        _options=None,
+        database=None,
+    ):
+        """Get an instance of Model class by ID.
+
+        This is really just a shorthand for ``Key(cls, id, ...).get()``.
+
+        Args:
+            id (Union[int, str]): ID of the entity to load.
+            parent (Optional[key.Key]): Key for the parent of the entity to
+                load.
+            namespace (Optional[str]): Namespace for the entity to load. If not
+                passed, uses the client's value.
+            project (Optional[str]): Project id for the entity to load. If not
+                passed, uses the client's value.
+            app (str): DEPRECATED: Synonym for `project`.
+            read_consistency: Set this to ``ndb.EVENTUAL`` if, instead of
+                waiting for the Datastore to finish applying changes to all
+                returned results, you wish to get possibly-not-current results
+                faster. You can't do this if using a transaction.
+            read_policy: DEPRECATED: Synonym for ``read_consistency``.
+            transaction (bytes): Any results returned will be consistent with
+                the Datastore state represented by this transaction id.
+                Defaults to the currently running transaction. Cannot be used
+                with ``read_consistency=ndb.EVENTUAL``.
+            retries (int): Number of times to retry this operation in the case
+                of transient server errors. Operation will potentially be tried
+                up to ``retries`` + 1 times. Set to ``0`` to try operation only
+                once, with no retries.
+            timeout (float): Override the gRPC timeout, in seconds.
+            deadline (float): DEPRECATED: Synonym for ``timeout``.
+            use_cache (bool): Specifies whether to store entities in in-process
+                cache; overrides in-process cache policy for this operation.
+            use_global_cache (bool): Specifies whether to store entities in
+                global cache; overrides global cache policy for this operation.
+            use_datastore (bool): Specifies whether to store entities in
+                Datastore; overrides Datastore policy for this operation.
+            global_cache_timeout (int): Maximum lifetime for entities in global
+                cache; overrides global cache timeout policy for this
+                operation.
+            use_memcache (bool): DEPRECATED: Synonym for ``use_global_cache``.
+            memcache_timeout (int): DEPRECATED: Synonym for
+                ``global_cache_timeout``.
+            max_memcache_items (int): No longer supported.
+            force_writes (bool): No longer supported.
+            database (Optional[str]): This parameter is ignored. Please set the
+                database on the Client instead.
+
+        Returns:
+            Optional[Model]: The retrieved entity, if one is found.
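+
+        For example, with a hypothetical ``MyModel`` kind (inside an active
+        client context):
+
+        .. code-block:: python
+
+            entity = MyModel.get_by_id(11)           # integer ID
+            entity = MyModel.get_by_id("name-xyz")   # string key name
+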
+ """ + return cls._get_by_id_async( + id, + parent=parent, + namespace=namespace, + project=project, + app=app, + _options=_options, + database=database, + ).result() + + get_by_id = _get_by_id + + @classmethod + @options_module.ReadOptions.options + @utils.positional(6) + def _get_by_id_async( + cls, + id, + parent=None, + namespace=None, + project=None, + app=None, + read_consistency=None, + read_policy=None, + transaction=None, + retries=None, + timeout=None, + deadline=None, + use_cache=None, + use_global_cache=None, + global_cache_timeout=None, + use_datastore=None, + use_memcache=None, + memcache_timeout=None, + max_memcache_items=None, + force_writes=None, + _options=None, + database: str = None, + ): + """Get an instance of Model class by ID. + + This is the asynchronous version of :meth:`get_by_id`. + + Args: + id (Union[int, str]): ID of the entity to load. + parent (Optional[key.Key]): Key for the parent of the entity to + load. + namespace (Optional[str]): Namespace for the entity to load. If not + passed, uses the client's value. + project (Optional[str]): Project id for the entity to load. If not + passed, uses the client's value. + app (str): DEPRECATED: Synonym for `project`. + read_consistency: Set this to ``ndb.EVENTUAL`` if, instead of + waiting for the Datastore to finish applying changes to all + returned results, you wish to get possibly-not-current results + faster. You can't do this if using a transaction. + read_policy: DEPRECATED: Synonym for ``read_consistency``. + transaction (bytes): Any results returned will be consistent with + the Datastore state represented by this transaction id. + Defaults to the currently running transaction. Cannot be used + with ``read_consistency=ndb.EVENTUAL``. + retries (int): Number of times to retry this operation in the case + of transient server errors. Operation will potentially be tried + up to ``retries`` + 1 times. Set to ``0`` to try operation only + once, with no retries. + timeout (float): Override the gRPC timeout, in seconds. + deadline (float): DEPRECATED: Synonym for ``timeout``. + use_cache (bool): Specifies whether to store entities in in-process + cache; overrides in-process cache policy for this operation. + use_global_cache (bool): Specifies whether to store entities in + global cache; overrides global cache policy for this operation. + use_datastore (bool): Specifies whether to store entities in + Datastore; overrides Datastore policy for this operation. + global_cache_timeout (int): Maximum lifetime for entities in global + cache; overrides global cache timeout policy for this + operation. + use_memcache (bool): DEPRECATED: Synonym for ``use_global_cache``. + memcache_timeout (int): DEPRECATED: Synonym for + ``global_cache_timeout``. + max_memcache_items (int): No longer supported. + force_writes (bool): No longer supported. + database (Optional[str]): This parameter is ignored. Please set the database on the Client instead. + + Returns: + tasklets.Future: Optional[Model]: The retrieved entity, if one is + found. + """ + if app: + if project: + raise TypeError("Can't pass 'app' and 'project' arguments together.") + + project = app + + # Key class is weird about keyword args. If you want it to use defaults + # you have to not pass them at all. 
+ key_args = {}
+
+ if project:
+ key_args["app"] = project
+
+ if namespace is not None:
+ key_args["namespace"] = namespace
+
+ key = key_module.Key(cls._get_kind(), id, parent=parent, **key_args)
+ return key.get_async(_options=_options)
+
+ get_by_id_async = _get_by_id_async
+
+ @classmethod
+ @options_module.ReadOptions.options_or_model_properties
+ @utils.positional(6)
+ def _get_or_insert(_cls, _name, *args, **kwargs):
+ """Transactionally retrieves an existing entity or creates a new one.
+
+ Will attempt to look up an entity with the given ``name`` and
+ ``parent``. If none is found, a new entity will be created using the
+ given ``name`` and ``parent``, passing any ``kw_model_args`` to the
+ constructor of the ``Model`` class.
+
+ If not already in a transaction, a new transaction will be created and
+ this operation will be run in that transaction.
+
+ Args:
+ name (str): Name of the entity to load or create.
+ parent (Optional[key.Key]): Key for the parent of the entity to
+ load.
+ namespace (Optional[str]): Namespace for the entity to load. If not
+ passed, uses the client's value.
+ project (Optional[str]): Project id for the entity to load. If not
+ passed, uses the client's value.
+ app (str): DEPRECATED: Synonym for `project`.
+ **kw_model_args: Keyword arguments to pass to the constructor of
+ the model class if an instance for the specified key name does
+ not already exist. If an instance with the supplied ``name``
+ and ``parent`` already exists, these arguments will be
+ discarded.
+ read_consistency: Set this to ``ndb.EVENTUAL`` if, instead of
+ waiting for the Datastore to finish applying changes to all
+ returned results, you wish to get possibly-not-current results
+ faster. You can't do this if using a transaction.
+ read_policy: DEPRECATED: Synonym for ``read_consistency``.
+ transaction (bytes): Any results returned will be consistent with
+ the Datastore state represented by this transaction id.
+ Defaults to the currently running transaction. Cannot be used
+ with ``read_consistency=ndb.EVENTUAL``.
+ retries (int): Number of times to retry this operation in the case
+ of transient server errors. Operation will potentially be tried
+ up to ``retries`` + 1 times. Set to ``0`` to try operation only
+ once, with no retries.
+ timeout (float): Override the gRPC timeout, in seconds.
+ deadline (float): DEPRECATED: Synonym for ``timeout``.
+ use_cache (bool): Specifies whether to store entities in in-process
+ cache; overrides in-process cache policy for this operation.
+ use_global_cache (bool): Specifies whether to store entities in
+ global cache; overrides global cache policy for this operation.
+ use_datastore (bool): Specifies whether to store entities in
+ Datastore; overrides Datastore policy for this operation.
+ global_cache_timeout (int): Maximum lifetime for entities in global
+ cache; overrides global cache timeout policy for this
+ operation.
+ use_memcache (bool): DEPRECATED: Synonym for ``use_global_cache``.
+ memcache_timeout (int): DEPRECATED: Synonym for
+ ``global_cache_timeout``.
+ max_memcache_items (int): No longer supported.
+ force_writes (bool): No longer supported.
+
+ Returns:
+ Model: The entity that was either just retrieved or created.
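+
+ For example (``Profile`` and its ``theme`` property are hypothetical,
+ shown only for illustration)::
+
+ profile = Profile.get_or_insert("alice", theme="dark")
+ # Returns the existing "alice" entity if there is one; otherwise
+ # creates it transactionally with theme="dark".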
+ """ + return _cls._get_or_insert_async(_name, *args, **kwargs).result() + + get_or_insert = _get_or_insert + + @classmethod + @options_module.ReadOptions.options_or_model_properties + @utils.positional(6) + def _get_or_insert_async(_cls, _name, *args, **kwargs): + """Transactionally retrieves an existing entity or creates a new one. + + This is the asynchronous version of :meth:``_get_or_insert``. + + Args: + name (str): Name of the entity to load or create. + parent (Optional[key.Key]): Key for the parent of the entity to + load. + namespace (Optional[str]): Namespace for the entity to load. If not + passed, uses the client's value. + project (Optional[str]): Project id for the entity to load. If not + passed, uses the client's value. + app (str): DEPRECATED: Synonym for `project`. + **kw_model_args: Keyword arguments to pass to the constructor of + the model class if an instance for the specified key name does + not already exist. If an instance with the supplied ``name`` + and ``parent`` already exists, these arguments will be + discarded. + read_consistency: Set this to ``ndb.EVENTUAL`` if, instead of + waiting for the Datastore to finish applying changes to all + returned results, you wish to get possibly-not-current results + faster. You can't do this if using a transaction. + read_policy: DEPRECATED: Synonym for ``read_consistency``. + transaction (bytes): Any results returned will be consistent with + the Datastore state represented by this transaction id. + Defaults to the currently running transaction. Cannot be used + with ``read_consistency=ndb.EVENTUAL``. + retries (int): Number of times to retry this operation in the case + of transient server errors. Operation will potentially be tried + up to ``retries`` + 1 times. Set to ``0`` to try operation only + once, with no retries. + timeout (float): Override the gRPC timeout, in seconds. + deadline (float): DEPRECATED: Synonym for ``timeout``. + use_cache (bool): Specifies whether to store entities in in-process + cache; overrides in-process cache policy for this operation. + use_global_cache (bool): Specifies whether to store entities in + global cache; overrides global cache policy for this operation. + use_datastore (bool): Specifies whether to store entities in + Datastore; overrides Datastore policy for this operation. + global_cache_timeout (int): Maximum lifetime for entities in global + cache; overrides global cache timeout policy for this + operation. + use_memcache (bool): DEPRECATED: Synonym for ``use_global_cache``. + memcache_timeout (int): DEPRECATED: Synonym for + ``global_cache_timeout``. + max_memcache_items (int): No longer supported. + force_writes (bool): No longer supported. + + Returns: + tasklets.Future: Model: The entity that was either just retrieved + or created. + """ + name = _name + parent = _cls._get_arg(kwargs, "parent") + namespace = _cls._get_arg(kwargs, "namespace") + app = _cls._get_arg(kwargs, "app") + project = _cls._get_arg(kwargs, "project") + options = kwargs.pop("_options") + + if not isinstance(name, str): + raise TypeError("'name' must be a string; received {!r}".format(name)) + + elif not name: + raise TypeError("'name' must not be an empty string.") + + if app: + if project: + raise TypeError("Can't pass 'app' and 'project' arguments together.") + + project = app + + # Key class is weird about keyword args. If you want it to use defaults + # you have to not pass them at all. 
+ key_args = {}
+
+ if project:
+ key_args["app"] = project
+
+ if namespace is not None:
+ key_args["namespace"] = namespace
+
+ key = key_module.Key(_cls._get_kind(), name, parent=parent, **key_args)
+
+ @tasklets.tasklet
+ def get_or_insert():
+ @tasklets.tasklet
+ def insert():
+ entity = _cls(**kwargs)
+ entity._key = key
+ yield entity.put_async(_options=options)
+
+ raise tasklets.Return(entity)
+
+ # We don't need to start a transaction just to check if the entity
+ # exists already
+ entity = yield key.get_async(_options=options)
+ if entity is not None:
+ raise tasklets.Return(entity)
+
+ if _transaction.in_transaction():
+ entity = yield insert()
+
+ else:
+ entity = yield _transaction.transaction_async(insert)
+
+ raise tasklets.Return(entity)
+
+ return get_or_insert()
+
+ get_or_insert_async = _get_or_insert_async
+
+ def _populate(self, **kwargs):
+ """Populate an instance from keyword arguments.
+
+ Each keyword argument will be used to set a corresponding property.
+ Each keyword must refer to a valid property name. This is similar to
+ passing keyword arguments to the ``Model`` constructor, except that no
+ provision for key, id, or parent is made.
+
+ Arguments:
+ **kwargs: Keyword arguments corresponding to properties of this
+ model class.
+ """
+ self._set_attributes(kwargs)
+
+ populate = _populate
+
+ def _has_complete_key(self):
+ """Return whether this entity has a complete key.
+
+ Returns:
+ bool: :data:`True` if and only if entity has a key and that key
+ has a name or an id.
+ """
+ return self._key is not None and self._key.id() is not None
+
+ has_complete_key = _has_complete_key
+
+ @utils.positional(2)
+ def _to_dict(self, include=None, exclude=None):
+ """Return a ``dict`` containing the entity's property values.
+
+ Arguments:
+ include (Optional[Union[list, tuple, set]]): Set of property names
+ to include. Default is to include all names.
+ exclude (Optional[Union[list, tuple, set]]): Set of property names
+ to exclude. Default is to not exclude any names.
+
+ Returns:
+ dict: A dictionary mapping included property names to their values.
+ """
+ values = {}
+ for prop in self._properties.values():
+ name = prop._code_name
+ if include is not None and name not in include:
+ continue
+ if exclude is not None and name in exclude:
+ continue
+
+ try:
+ values[name] = prop._get_for_dict(self)
+ except UnprojectedPropertyError:
+ # Ignore unprojected property errors, rather than failing
+ pass
+
+ return values
+
+ to_dict = _to_dict
+
+ @classmethod
+ def _code_name_from_stored_name(cls, name):
+ """Return the code name from a property when it's different from the
+ stored name. Used in deserialization from datastore."""
+ if name in cls._properties:
+ return cls._properties[name]._code_name
+
+ # If name isn't in cls._properties but there is a property with that
+ # name, it means that property has a different codename, and returning
+ # this name will potentially clobber the real property. Take for
+ # example:
+ #
+ # class SomeKind(ndb.Model):
+ # foo = ndb.IntegerProperty(name="bar")
+ #
+ # If we are passed "bar", we know to translate that to "foo", because
+ # the datastore property, "bar", is the NDB property, "foo". But if we
+ # are passed "foo", here, then that must be the datastore property,
+ # "foo", which isn't even mapped to anything in the NDB model.
+ #
+ prop = getattr(cls, name, None)
+ if prop:
+ # Won't map to a property, so this datastore property will be
+ # effectively ignored.
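+ # A single space can never be a valid Python identifier, so
+ # returning it acts as a sentinel that cannot clobber any real
+ # attribute on the model.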
+ return " " + + return name + + @classmethod + def _pre_allocate_ids_hook(cls, size, max, parent): + pass + + @classmethod + def _post_allocate_ids_hook(cls, size, max, parent, future): + pass + + @classmethod + def _pre_delete_hook(self, key): + pass + + @classmethod + def _post_delete_hook(self, key, future): + pass + + @classmethod + def _pre_get_hook(self, key): + pass + + @classmethod + def _post_get_hook(self, key, future): + pass + + @classmethod + def _pre_put_hook(self): + pass + + @classmethod + def _post_put_hook(self, future): + pass + + +class Expando(Model): + """Model subclass to support dynamic Property names and types. + + Sometimes the set of properties is not known ahead of time. In such + cases you can use the Expando class. This is a Model subclass that + creates properties on the fly, both upon assignment and when loading + an entity from Cloud Datastore. For example:: + + >>> class SuperPerson(Expando): + name = StringProperty() + superpower = StringProperty() + + >>> razorgirl = SuperPerson(name='Molly Millions', + superpower='bionic eyes, razorblade hands', + rasta_name='Steppin\' Razor', + alt_name='Sally Shears') + >>> elastigirl = SuperPerson(name='Helen Parr', + superpower='stretchable body') + >>> elastigirl.max_stretch = 30 # Meters + + >>> print(razorgirl._properties.keys()) + ['rasta_name', 'name', 'superpower', 'alt_name'] + >>> print(elastigirl._properties) + {'max_stretch': GenericProperty('max_stretch'), + 'name': StringProperty('name'), + 'superpower': StringProperty('superpower')} + + Note: You can inspect the properties of an expando instance using the + _properties attribute, as shown above. This property exists for plain Model + instances too; it is just not as interesting for those. + """ + + # Set this to False (in an Expando subclass or entity) to make + # properties default to unindexed. + _default_indexed = True + + # Set this to True to write [] to Cloud Datastore instead of no property + _write_empty_list_for_dynamic_properties = None + + def _set_attributes(self, kwds): + for name, value in kwds.items(): + setattr(self, name, value) + + def __getattr__(self, name): + prop = self._properties.get(name) + if prop is None: + return super(Expando, self).__getattribute__(name) + return prop._get_value(self) + + def __setattr__(self, name, value): + if ( + name.startswith("_") + or isinstance(getattr(self.__class__, name, None), (Property, property)) + or isinstance(self._properties.get(name, None), (Property, property)) + ): + return super(Expando, self).__setattr__(name, value) + + if "." 
+ # Legacy structured property
+ supername, subname = name.split(".", 1)
+ supervalue = getattr(self, supername, None)
+ if isinstance(supervalue, Expando):
+ return setattr(supervalue, subname, value)
+ return setattr(self, supername, {subname: value})
+
+ self._clone_properties()
+
+ if isinstance(value, Model):
+ prop = StructuredProperty(Model, name)
+ elif isinstance(value, dict):
+ prop = StructuredProperty(Expando, name)
+ else:
+ prop = GenericProperty(
+ name,
+ repeated=isinstance(value, (list, tuple)),
+ indexed=self._default_indexed,
+ write_empty_list=self._write_empty_list_for_dynamic_properties,
+ )
+ prop._code_name = name
+ self._properties[name] = prop
+ prop._set_value(self, value)
+
+ def __delattr__(self, name):
+ if name.startswith("_") or isinstance(
+ getattr(self.__class__, name, None), (Property, property)
+ ):
+ return super(Expando, self).__delattr__(name)
+ prop = self._properties.get(name)
+ if not isinstance(prop, Property):
+ raise TypeError(
+ "Model properties must be Property instances; not %r" % prop
+ )
+ prop._delete_value(self)
+ if name in super(Expando, self)._properties:
+ raise RuntimeError(
+ "Property %s still in the list of properties for the "
+ "base class." % name
+ )
+ del self._properties[name]
+
+
+@options_module.ReadOptions.options
+@utils.positional(1)
+def get_multi_async(
+ keys,
+ read_consistency=None,
+ read_policy=None,
+ transaction=None,
+ retries=None,
+ timeout=None,
+ deadline=None,
+ use_cache=None,
+ use_global_cache=None,
+ global_cache_timeout=None,
+ use_datastore=None,
+ use_memcache=None,
+ memcache_timeout=None,
+ max_memcache_items=None,
+ force_writes=None,
+ _options=None,
+):
+ """Fetches a sequence of keys.
+
+ Args:
+ keys (Sequence[:class:`~google.cloud.ndb.key.Key`]): A sequence of
+ keys.
+ read_consistency: Set this to ``ndb.EVENTUAL`` if, instead of
+ waiting for the Datastore to finish applying changes to all
+ returned results, you wish to get possibly-not-current results
+ faster. You can't do this if using a transaction.
+ read_policy: DEPRECATED: Synonym for ``read_consistency``.
+ transaction (bytes): Any results returned will be consistent with
+ the Datastore state represented by this transaction id.
+ Defaults to the currently running transaction. Cannot be used
+ with ``read_consistency=ndb.EVENTUAL``.
+ retries (int): Number of times to retry this operation in the case
+ of transient server errors. Operation will potentially be tried
+ up to ``retries`` + 1 times. Set to ``0`` to try operation only
+ once, with no retries.
+ timeout (float): Override the gRPC timeout, in seconds.
+ deadline (float): DEPRECATED: Synonym for ``timeout``.
+ use_cache (bool): Specifies whether to store entities in in-process
+ cache; overrides in-process cache policy for this operation.
+ use_global_cache (bool): Specifies whether to store entities in
+ global cache; overrides global cache policy for this operation.
+ use_datastore (bool): Specifies whether to store entities in
+ Datastore; overrides Datastore policy for this operation.
+ global_cache_timeout (int): Maximum lifetime for entities in global
+ cache; overrides global cache timeout policy for this
+ operation.
+ use_memcache (bool): DEPRECATED: Synonym for ``use_global_cache``.
+ memcache_timeout (int): DEPRECATED: Synonym for
+ ``global_cache_timeout``.
+ max_memcache_items (int): No longer supported.
+ force_writes (bool): No longer supported.
+
+ Returns:
+ List[:class:`~google.cloud.ndb.tasklets.Future`]: List of futures.
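+
+ For example (``key1`` and ``key2`` are placeholder keys)::
+
+ futures = get_multi_async([key1, key2])
+ entities = [future.result() for future in futures]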
+ """ + return [key.get_async(_options=_options) for key in keys] + + +@options_module.ReadOptions.options +@utils.positional(1) +def get_multi( + keys, + read_consistency=None, + read_policy=None, + transaction=None, + retries=None, + timeout=None, + deadline=None, + use_cache=None, + use_global_cache=None, + global_cache_timeout=None, + use_datastore=None, + use_memcache=None, + memcache_timeout=None, + max_memcache_items=None, + force_writes=None, + _options=None, +): + """Fetches a sequence of keys. + + Args: + keys (Sequence[:class:`~google.cloud.ndb.key.Key`]): A sequence of + keys. + read_consistency: Set this to ``ndb.EVENTUAL`` if, instead of + waiting for the Datastore to finish applying changes to all + returned results, you wish to get possibly-not-current results + faster. You can't do this if using a transaction. + transaction (bytes): Any results returned will be consistent with + the Datastore state represented by this transaction id. + Defaults to the currently running transaction. Cannot be used + with ``read_consistency=ndb.EVENTUAL``. + retries (int): Number of times to retry this operation in the case + of transient server errors. Operation will potentially be tried + up to ``retries`` + 1 times. Set to ``0`` to try operation only + once, with no retries. + timeout (float): Override the gRPC timeout, in seconds. + deadline (float): DEPRECATED: Synonym for ``timeout``. + use_cache (bool): Specifies whether to store entities in in-process + cache; overrides in-process cache policy for this operation. + use_global_cache (bool): Specifies whether to store entities in + global cache; overrides global cache policy for this operation. + use_datastore (bool): Specifies whether to store entities in + Datastore; overrides Datastore policy for this operation. + global_cache_timeout (int): Maximum lifetime for entities in global + cache; overrides global cache timeout policy for this + operation. + use_memcache (bool): DEPRECATED: Synonym for ``use_global_cache``. + memcache_timeout (int): DEPRECATED: Synonym for + ``global_cache_timeout``. + max_memcache_items (int): No longer supported. + read_policy: DEPRECATED: Synonym for ``read_consistency``. + force_writes (bool): No longer supported. + + Returns: + List[Union[:class:`~google.cloud.ndb.model.Model`, :data:`None`]]: List + containing the retrieved models or None where a key was not found. + """ + futures = [key.get_async(_options=_options) for key in keys] + return [future.result() for future in futures] + + +@options_module.Options.options +@utils.positional(1) +def put_multi_async( + entities, + retries=None, + timeout=None, + deadline=None, + use_cache=None, + use_global_cache=None, + global_cache_timeout=None, + use_datastore=None, + use_memcache=None, + memcache_timeout=None, + max_memcache_items=None, + force_writes=None, + _options=None, +): + """Stores a sequence of Model instances. + + Args: + retries (int): Number of times to retry this operation in the case + of transient server errors. Operation will potentially be tried + up to ``retries`` + 1 times. Set to ``0`` to try operation only + once, with no retries. + entities (List[:class:`~google.cloud.ndb.model.Model`]): A sequence + of models to store. + timeout (float): Override the gRPC timeout, in seconds. + deadline (float): DEPRECATED: Synonym for ``timeout``. + use_cache (bool): Specifies whether to store entities in in-process + cache; overrides in-process cache policy for this operation. 
+ use_global_cache (bool): Specifies whether to store entities in + global cache; overrides global cache policy for this operation. + use_datastore (bool): Specifies whether to store entities in + Datastore; overrides Datastore policy for this operation. + global_cache_timeout (int): Maximum lifetime for entities in global + cache; overrides global cache timeout policy for this + operation. + use_memcache (bool): DEPRECATED: Synonym for ``use_global_cache``. + memcache_timeout (int): DEPRECATED: Synonym for + ``global_cache_timeout``. + max_memcache_items (int): No longer supported. + force_writes (bool): No longer supported. + + Returns: + List[:class:`~google.cloud.ndb.tasklets.Future`]: List of futures. + """ + return [entity.put_async(_options=_options) for entity in entities] + + +@options_module.Options.options +@utils.positional(1) +def put_multi( + entities, + retries=None, + timeout=None, + deadline=None, + use_cache=None, + use_global_cache=None, + global_cache_timeout=None, + use_datastore=None, + use_memcache=None, + memcache_timeout=None, + max_memcache_items=None, + force_writes=None, + _options=None, +): + """Stores a sequence of Model instances. + + Args: + entities (List[:class:`~google.cloud.ndb.model.Model`]): A sequence + of models to store. + retries (int): Number of times to retry this operation in the case + of transient server errors. Operation will potentially be tried + up to ``retries`` + 1 times. Set to ``0`` to try operation only + once, with no retries. + timeout (float): Override the gRPC timeout, in seconds. + deadline (float): DEPRECATED: Synonym for ``timeout``. + use_cache (bool): Specifies whether to store entities in in-process + cache; overrides in-process cache policy for this operation. + use_global_cache (bool): Specifies whether to store entities in + global cache; overrides global cache policy for this operation. + use_datastore (bool): Specifies whether to store entities in + Datastore; overrides Datastore policy for this operation. + global_cache_timeout (int): Maximum lifetime for entities in global + cache; overrides global cache timeout policy for this + operation. + use_memcache (bool): DEPRECATED: Synonym for ``use_global_cache``. + memcache_timeout (int): DEPRECATED: Synonym for + ``global_cache_timeout``. + max_memcache_items (int): No longer supported. + force_writes (bool): No longer supported. + + Returns: + List[:class:`~google.cloud.ndb.key.Key`]: A list with the stored keys. + """ + futures = [entity.put_async(_options=_options) for entity in entities] + return [future.result() for future in futures] + + +@options_module.Options.options +@utils.positional(1) +def delete_multi_async( + keys, + retries=None, + timeout=None, + deadline=None, + use_cache=None, + use_global_cache=None, + global_cache_timeout=None, + use_datastore=None, + use_memcache=None, + memcache_timeout=None, + max_memcache_items=None, + force_writes=None, + _options=None, +): + """Deletes a sequence of keys. + + Args: + retries (int): Number of times to retry this operation in the case + of transient server errors. Operation will potentially be tried + up to ``retries`` + 1 times. Set to ``0`` to try operation only + once, with no retries. + keys (Sequence[:class:`~google.cloud.ndb.key.Key`]): A sequence of + keys. + timeout (float): Override the gRPC timeout, in seconds. + deadline (float): DEPRECATED: Synonym for ``timeout``. + use_cache (bool): Specifies whether to store entities in in-process + cache; overrides in-process cache policy for this operation. 
+ use_global_cache (bool): Specifies whether to store entities in + global cache; overrides global cache policy for this operation. + use_datastore (bool): Specifies whether to store entities in + Datastore; overrides Datastore policy for this operation. + global_cache_timeout (int): Maximum lifetime for entities in global + cache; overrides global cache timeout policy for this + operation. + use_memcache (bool): DEPRECATED: Synonym for ``use_global_cache``. + memcache_timeout (int): DEPRECATED: Synonym for + ``global_cache_timeout``. + max_memcache_items (int): No longer supported. + force_writes (bool): No longer supported. + + Returns: + List[:class:`~google.cloud.ndb.tasklets.Future`]: List of futures. + """ + return [key.delete_async(_options=_options) for key in keys] + + +@options_module.Options.options +@utils.positional(1) +def delete_multi( + keys, + retries=None, + timeout=None, + deadline=None, + use_cache=None, + use_global_cache=None, + global_cache_timeout=None, + use_datastore=None, + use_memcache=None, + memcache_timeout=None, + max_memcache_items=None, + force_writes=None, + _options=None, +): + """Deletes a sequence of keys. + + Args: + keys (Sequence[:class:`~google.cloud.ndb.key.Key`]): A sequence of + keys. + retries (int): Number of times to retry this operation in the case + of transient server errors. Operation will potentially be tried + up to ``retries`` + 1 times. Set to ``0`` to try operation only + once, with no retries. + timeout (float): Override the gRPC timeout, in seconds. + deadline (float): DEPRECATED: Synonym for ``timeout``. + use_cache (bool): Specifies whether to store entities in in-process + cache; overrides in-process cache policy for this operation. + use_global_cache (bool): Specifies whether to store entities in + global cache; overrides global cache policy for this operation. + use_datastore (bool): Specifies whether to store entities in + Datastore; overrides Datastore policy for this operation. + global_cache_timeout (int): Maximum lifetime for entities in global + cache; overrides global cache timeout policy for this + operation. + use_memcache (bool): DEPRECATED: Synonym for ``use_global_cache``. + memcache_timeout (int): DEPRECATED: Synonym for + ``global_cache_timeout``. + max_memcache_items (int): No longer supported. + force_writes (bool): No longer supported. + + Returns: + List[:data:`None`]: A list whose items are all None, one per deleted + key. 
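+
+ For example, deleting a batch of placeholder keys ``key1`` and
+ ``key2``::
+
+ delete_multi([key1, key2])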
+ """ + futures = [key.delete_async(_options=_options) for key in keys] + return [future.result() for future in futures] + + +def get_indexes_async(**options): + """Get a data structure representing the configured indexes.""" + raise NotImplementedError + + +def get_indexes(**options): + """Get a data structure representing the configured indexes.""" + raise NotImplementedError + + +def _unpack_user(v): + """Internal helper to unpack a User value from a protocol buffer.""" + uv = v.uservalue() + email = str(uv.email().decode("utf-8")) + auth_domain = str(uv.auth_domain().decode("utf-8")) + obfuscated_gaiaid = uv.obfuscated_gaiaid().decode("utf-8") + obfuscated_gaiaid = str(obfuscated_gaiaid) + + value = User( + email=email, + _auth_domain=auth_domain, + _user_id=obfuscated_gaiaid, + ) + return value diff --git a/packages/google-cloud-ndb/google/cloud/ndb/msgprop.py b/packages/google-cloud-ndb/google/cloud/ndb/msgprop.py new file mode 100644 index 000000000000..7cbfa644069b --- /dev/null +++ b/packages/google-cloud-ndb/google/cloud/ndb/msgprop.py @@ -0,0 +1,31 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Define properties for directly storing ProtoRPC messages. + +These classes are not implemented. +""" + + +__all__ = ["EnumProperty", "MessageProperty"] + + +class EnumProperty(object): + def __init__(self, *args, **kwargs): + raise NotImplementedError + + +class MessageProperty(object): + def __init__(self, *args, **kwargs): + raise NotImplementedError diff --git a/packages/google-cloud-ndb/google/cloud/ndb/polymodel.py b/packages/google-cloud-ndb/google/cloud/ndb/polymodel.py new file mode 100644 index 000000000000..da192568b2ec --- /dev/null +++ b/packages/google-cloud-ndb/google/cloud/ndb/polymodel.py @@ -0,0 +1,264 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Polymorphic models and queries. + +The standard NDB Model class only supports 'functional polymorphism'. +That is, you can create a subclass of Model, and then subclass that +class, as many generations as necessary, and those classes will share +all the same properties and behaviors of their base classes. However, +subclassing Model in this way gives each subclass its own kind. This +means that it is not possible to do 'polymorphic queries'. Building a +query on a base class will only return entities whose kind matches +that base class's kind, and exclude entities that are instances of +some subclass of that base class. 
+ +The PolyModel class defined here lets you create class hierarchies +that support polymorphic queries. Simply subclass PolyModel instead +of Model. +""" + +from google.cloud.ndb import model + + +__all__ = ["PolyModel"] + +_CLASS_KEY_PROPERTY = "class" + + +class _ClassKeyProperty(model.StringProperty): + """Property to store the 'class key' of a polymorphic class. + + The class key is a list of strings describing a polymorphic entity's + place within its class hierarchy. This property is automatically + calculated. For example: + + .. testsetup:: class-key-property + + from google.cloud import ndb + + + class Animal(ndb.PolyModel): + pass + + + class Feline(Animal): + pass + + + class Cat(Feline): + pass + + .. doctest:: class-key-property + + >>> Animal().class_ + ['Animal'] + >>> Feline().class_ + ['Animal', 'Feline'] + >>> Cat().class_ + ['Animal', 'Feline', 'Cat'] + """ + + def __init__(self, name=_CLASS_KEY_PROPERTY, indexed=True): + """Constructor. + + If you really want to you can give this a different datastore name + or make it unindexed. For example: + + .. code-block:: python + + class Foo(PolyModel): + class_ = _ClassKeyProperty(indexed=False) + """ + super(_ClassKeyProperty, self).__init__( + name=name, indexed=indexed, repeated=True + ) + + def _set_value(self, entity, value): + """The class_ property is read-only from the user's perspective.""" + raise TypeError("%s is a read-only property" % self._code_name) + + def _get_value(self, entity): + """Compute and store a default value if necessary.""" + value = super(_ClassKeyProperty, self)._get_value(entity) + if not value: + value = entity._class_key() + self._store_value(entity, value) + return value + + def _prepare_for_put(self, entity): + """Ensure the class_ property is initialized before it is serialized.""" + self._get_value(entity) # For its side effects. + + +class PolyModel(model.Model): + """Base class for class hierarchies supporting polymorphic queries. + + Use this class to build hierarchies that can be queried based on + their types. + + Example: + + Consider the following model hierarchy:: + + +------+ + |Animal| + +------+ + | + +-----------------+ + | | + +------+ +------+ + |Canine| |Feline| + +------+ +------+ + | | + +-------+ +-------+ + | | | | + +---+ +----+ +---+ +-------+ + |Dog| |Wolf| |Cat| |Panther| + +---+ +----+ +---+ +-------+ + + This class hierarchy has three levels. The first is the `root + class`. All models in a single class hierarchy must inherit from + this root. All models in the hierarchy are stored as the same + kind as the root class. For example, Panther entities when stored + to Cloud Datastore are of the kind `Animal`. Querying against the + Animal kind will retrieve Cats, Dogs and Canines, for example, + that match your query. Different classes stored in the `root + class` kind are identified by their class key. When loaded from + Cloud Datastore, it is mapped to the appropriate implementation + class. + + Polymorphic properties: + + Properties that are defined in a given base class within a + hierarchy are stored in Cloud Datastore for all subclasses only. + So, if the Feline class had a property called `whiskers`, the Cat + and Panther entities would also have whiskers, but not Animal, + Canine, Dog or Wolf. + + Polymorphic queries: + + When written to Cloud Datastore, all polymorphic objects + automatically have a property called `class` that you can query + against. Using this property it is possible to easily write a + query against any sub-hierarchy. 
For example, to fetch only
+ Canine objects, including all Dogs and Wolves:
+
+ .. code-block:: python
+
+ Canine.query()
+
+ The `class` property is not meant to be used by your code other
+ than for queries. Since it is supposed to represent the real
+ Python class, it is intended to be hidden from view. It is,
+ however, accessible as the `class_` attribute if you need it.
+
+ Root class:
+
+ The root class is the class from which all the other classes in
+ the hierarchy inherit. Each hierarchy has a single root class.
+ A class is a root class if it is an immediate child of PolyModel.
+ The subclasses of the root class are all the same kind as the root
+ class. In other words:
+
+ .. code-block:: python
+
+ Animal.kind() == Feline.kind() == Panther.kind() == 'Animal'
+
+ Note:
+
+ All classes in a given hierarchy must have unique names, since
+ the class name is used to identify the appropriate subclass.
+ """
+
+ class_ = _ClassKeyProperty()
+
+ _class_map = {} # Map class key -> suitable subclass.
+
+ @classmethod
+ def _update_kind_map(cls):
+ """Override; called by Model._fix_up_properties().
+
+ Update the kind map as well as the class map, except for PolyModel
+ itself (its class key is empty). Note that the kind map will
+ contain entries for all classes in a PolyModel hierarchy; they all
+ have the same kind, but different class names. PolyModel class
+ names, like regular Model class names, must be globally unique.
+ """
+ cls._kind_map[cls._class_name()] = cls
+ class_key = cls._class_key()
+ if class_key:
+ cls._class_map[tuple(class_key)] = cls
+
+ @classmethod
+ def _class_key(cls):
+ """Return the class key.
+
+ This is a list of class names, e.g. ['Animal', 'Feline', 'Cat'].
+ """
+ return [c._class_name() for c in cls._get_hierarchy()]
+
+ @classmethod
+ def _get_kind(cls):
+ """Override.
+
+ Make sure that the kind returned is the root class of the
+ polymorphic hierarchy.
+ """
+ bases = cls._get_hierarchy()
+ if not bases:
+ # We have to jump through some hoops to call the superclass'
+ # _get_kind() method. First, this is called by the metaclass
+ # before the PolyModel name is defined, so it can't use
+ # super(PolyModel, cls)._get_kind(). Second, we can't just call
+ # Model._get_kind() because that always returns 'Model'. Hence
+ # the '__func__' hack.
+ return model.Model._get_kind.__func__(cls)
+ else:
+ return bases[0]._class_name()
+
+ @classmethod
+ def _class_name(cls):
+ """Return the class name.
+
+ This overrides Model._class_name() which is an alias for _get_kind().
+ This is overridable in case you want to use a different class
+ name. The main use case is probably to maintain backwards
+ compatibility with datastore contents after renaming a class.
+
+ NOTE: When overriding this for an intermediate class in your
+ hierarchy (as opposed to a leaf class), make sure to test
+ cls.__name__, or else all subclasses will appear to have the
+ same class name.
+ """
+ return cls.__name__
+
+ @classmethod
+ def _get_hierarchy(cls):
+ """Internal helper to return the list of polymorphic base classes.
+ This returns a list of class objects, e.g. [Animal, Feline, Cat].
+ """ + bases = [] + for base in cls.mro(): # pragma: no branch + if hasattr(base, "_get_hierarchy"): + bases.append(base) + del bases[-1] # Delete PolyModel itself + bases.reverse() + return bases + + @classmethod + def _default_filters(cls): + if len(cls._get_hierarchy()) <= 1: + return () + return (cls.class_ == cls._class_name(),) diff --git a/packages/google-cloud-ndb/google/cloud/ndb/query.py b/packages/google-cloud-ndb/google/cloud/ndb/query.py new file mode 100644 index 000000000000..76731ede2337 --- /dev/null +++ b/packages/google-cloud-ndb/google/cloud/ndb/query.py @@ -0,0 +1,2368 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""High-level wrapper for datastore queries. + +The fundamental API here overloads the 6 comparison operators to represent +filters on property values, and supports AND and OR operations (implemented as +functions -- Python's 'and' and 'or' operators cannot be overloaded, and the +'&' and '|' operators have a priority that conflicts with the priority of +comparison operators). + +For example:: + + class Employee(Model): + name = StringProperty() + age = IntegerProperty() + rank = IntegerProperty() + + @classmethod + def demographic(cls, min_age, max_age): + return cls.query().filter(AND(cls.age >= min_age, + cls.age <= max_age)) + + @classmethod + def ranked(cls, rank): + return cls.query(cls.rank == rank).order(cls.age) + + for emp in Employee.seniors(42, 5): + print(emp.name, emp.age, emp.rank) + +The 'in' operator cannot be overloaded, but is supported through the IN() +method. For example:: + + Employee.query().filter(Employee.rank.IN([4, 5, 6])) + +Sort orders are supported through the order() method; unary minus is +overloaded on the Property class to represent a descending order:: + + Employee.query().order(Employee.name, -Employee.age) + +Besides using AND() and OR(), filters can also be combined by repeatedly +calling .filter():: + + query1 = Employee.query() # A query that returns all employees + query2 = query1.filter(Employee.age >= 30) # Only those over 30 + query3 = query2.filter(Employee.age < 40) # Only those in their 30s + +A further shortcut is calling .filter() with multiple arguments; this implies +AND():: + + query1 = Employee.query() # A query that returns all employees + query3 = query1.filter(Employee.age >= 30, + Employee.age < 40) # Only those in their 30s + +And finally you can also pass one or more filter expressions directly to the +.query() method:: + + query3 = Employee.query(Employee.age >= 30, + Employee.age < 40) # Only those in their 30s + +Query objects are immutable, so these methods always return a new Query object; +the above calls to filter() do not affect query1. On the other hand, operations +that are effectively no-ops may return the original Query object. 
+
+Sort orders can also be combined this way, and .filter() and .order() calls may
+be intermixed::
+
+ query4 = query3.order(-Employee.age)
+ query5 = query4.order(Employee.name)
+ query6 = query5.filter(Employee.rank == 5)
+
+Again, multiple .order() calls can be combined::
+
+ query5 = query3.order(-Employee.age, Employee.name)
+
+The simplest way to retrieve Query results is a for-loop::
+
+ for emp in query3:
+ print(emp.name, emp.age)
+
+Some other methods to run a query and access its results::
+
+ :meth:`Query.iter`() # Return an iterator; same as iter(q) but more
+ flexible.
+ :meth:`Query.fetch`(N) # Return a list of the first N results
+ :meth:`Query.get`() # Return the first result
+ :meth:`Query.count`(N) # Return the number of results, with a maximum of N
+ :meth:`Query.fetch_page`(N, start_cursor=cursor) # Return (results, cursor,
+ has_more)
+
+All of the above methods take a standard set of additional query options,
+in the form of keyword arguments such as keys_only=True. You can also pass
+a QueryOptions object options=QueryOptions(...), but this is deprecated.
+
+The most important query options are:
+
+- keys_only: bool, if set the results are keys instead of entities.
+- limit: int, limits the number of results returned.
+- offset: int, skips this many results first.
+- start_cursor: Cursor, start returning results after this position.
+- end_cursor: Cursor, stop returning results after this position.
+
+The following query options have been deprecated or are not supported in
+datastore queries:
+
+- batch_size: int, hint for the number of results returned per RPC.
+- prefetch_size: int, hint for the number of results in the first RPC.
+- produce_cursors: bool, return Cursor objects with the results.
+
+All of the above methods except for iter() have asynchronous variants as well,
+which return a Future; to get the operation's ultimate result, yield the Future
+(when inside a tasklet) or call the Future's get_result() method (outside a
+tasklet)::
+
+ :meth:`Query.fetch_async`(N)
+ :meth:`Query.get_async`()
+ :meth:`Query.count_async`(N)
+ :meth:`Query.fetch_page_async`(N, start_cursor=cursor)
+
+Finally, there's an idiom to efficiently loop over the Query results in a
+tasklet, properly yielding when appropriate::
+
+ it = query1.iter()
+ while (yield it.has_next_async()):
+ emp = it.next()
+ print(emp.name, emp.age)
+"""
+
+import functools
+import logging
+
+from google.cloud.ndb import context as context_module
+from google.cloud.ndb import exceptions
+from google.cloud.ndb import _options
+from google.cloud.ndb import tasklets
+from google.cloud.ndb import utils
+
+
+__all__ = [
+ "QueryOptions",
+ "PropertyOrder",
+ "RepeatedStructuredPropertyPredicate",
+ "ParameterizedThing",
+ "Parameter",
+ "ParameterizedFunction",
+ "Node",
+ "FalseNode",
+ "ParameterNode",
+ "FilterNode",
+ "PostFilterNode",
+ "ConjunctionNode",
+ "DisjunctionNode",
+ "AND",
+ "OR",
+ "Query",
+ "gql",
+]
+
+
+_EQ_OP = "="
+_NE_OP = "!="
+_IN_OP = "in"
+_NOT_IN_OP = "not_in"
+_LT_OP = "<"
+_GT_OP = ">"
+_OPS = frozenset([_EQ_OP, _NE_OP, _LT_OP, "<=", _GT_OP, ">=", _IN_OP, _NOT_IN_OP])
+
+_log = logging.getLogger(__name__)
+
+
+class PropertyOrder(object):
+ """The sort order for a property name, to be used when ordering the
+ results of a query.
+
+ Args:
+ name (str): The name of the model property to use for ordering.
+ reverse (bool): Whether to reverse the sort order (descending)
+ or not (ascending). Default is False.
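+
+ For example::
+
+ PropertyOrder("name") # ascending by name
+ PropertyOrder("age", reverse=True) # descending by age
+ -PropertyOrder("age") # unary minus also reverses the direction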
+ """ + + def __init__(self, name, reverse=False): + self.name = name + self.reverse = reverse + + def __repr__(self): + return "PropertyOrder(name='{}', reverse={})".format(self.name, self.reverse) + + def __neg__(self): + reverse = not self.reverse + return self.__class__(name=self.name, reverse=reverse) + + +class RepeatedStructuredPropertyPredicate(object): + """A predicate for querying repeated structured properties. + + Called by ``model.StructuredProperty._compare``. This is used to handle + queries of the form:: + + Squad.query(Squad.members == Member(name="Joe", age=24, rank=5)) + + This query should find any squad with a member named "Joe" whose age is 24 + and rank is 5. + + Datastore, on its own, can find all squads with a team member named Joe, or + a team member whose age is 24, or whose rank is 5, but it can't be queried + for all 3 in a single subentity. This predicate must be applied client + side, therefore, to limit results to entities where all the keys match for + a single subentity. + + Arguments: + name (str): Name of the repeated structured property being queried + (e.g. "members"). + match_keys (list[str]): Property names to check on the subentities + being queried (e.g. ["name", "age", "rank"]). + entity_pb (google.cloud.datastore_v1.proto.entity_pb2.Entity): A + partial entity protocol buffer containing the values that must + match in a subentity of the repeated structured property. Should + contain a value for each key in ``match_keys``. + """ + + def __init__(self, name, match_keys, entity_pb): + self.name = name + self.match_keys = match_keys + self.match_values = [entity_pb.properties[key] for key in match_keys] + + def __call__(self, entity_pb): + prop_pb = entity_pb.properties.get(self.name) + if prop_pb: + subentities = prop_pb.array_value.values + for subentity in subentities: + properties = subentity.entity_value.properties + values = [properties.get(key) for key in self.match_keys] + if values == self.match_values: + return True + + else: + # Backwards compatibility. Legacy NDB, rather than using + # Datastore's ability to embed subentities natively, used dotted + # property names. + prefix = self.name + "." + subentities = () + for prop_name, prop_pb in entity_pb.properties.items(): + if not prop_name.startswith(prefix): + continue + + subprop_name = prop_name.split(".", 1)[1] + if not subentities: + subentities = [ + {subprop_name: value} for value in prop_pb.array_value.values + ] + else: + for subentity, value in zip( + subentities, prop_pb.array_value.values + ): + subentity[subprop_name] = value + + for subentity in subentities: + values = [subentity.get(key) for key in self.match_keys] + if values == self.match_values: + return True + + return False + + +class ParameterizedThing(object): + """Base class for :class:`Parameter` and :class:`ParameterizedFunction`. + + This exists purely for :func:`isinstance` checks. + """ + + def __eq__(self, other): + raise NotImplementedError + + def __ne__(self, other): + eq = self.__eq__(other) + if eq is not NotImplemented: + eq = not eq + return eq + + +class Parameter(ParameterizedThing): + """Represents a bound variable in a GQL query. + + ``Parameter(1)`` corresponds to a slot labeled ``:1`` in a GQL query. + ``Parameter('something')`` corresponds to a slot labeled ``:something``. + + The value must be set (bound) separately. + + Args: + key (Union[str, int]): The parameter key. + + Raises: + TypeError: If the ``key`` is not a string or integer. 
+ """ + + def __init__(self, key): + if not isinstance(key, (int, str)): + raise TypeError( + "Parameter key must be an integer or string, not {}".format(key) + ) + self._key = key + + def __repr__(self): + return "{}({!r})".format(type(self).__name__, self._key) + + def __eq__(self, other): + if not isinstance(other, Parameter): + return NotImplemented + + return self._key == other._key + + @property + def key(self): + """Retrieve the key.""" + return self._key + + def resolve(self, bindings, used): + """Resolve the current parameter from the parameter bindings. + + Args: + bindings (dict): A mapping of parameter bindings. + used (Dict[Union[str, int], bool]): A mapping of already used + parameters. This will be modified if the current parameter + is in ``bindings``. + + Returns: + Any: The bound value for the current parameter. + + Raises: + exceptions.BadArgumentError: If the current parameter is not in ``bindings``. + """ + key = self._key + if key not in bindings: + raise exceptions.BadArgumentError("Parameter :{} is not bound.".format(key)) + value = bindings[key] + used[key] = True + return value + + +class ParameterizedFunction(ParameterizedThing): + """Represents a GQL function with parameterized arguments. + + For example, ParameterizedFunction('key', [Parameter(1)]) stands for + the GQL syntax KEY(:1). + """ + + def __init__(self, func, values): + self.func = func + self.values = values + + from google.cloud.ndb import _gql # avoid circular import + + _func = _gql.FUNCTIONS.get(func) + if _func is None: + raise ValueError("Unknown GQL function: {}".format(func)) + self._func = _func + + def __repr__(self): + return "ParameterizedFunction(%r, %r)" % (self.func, self.values) + + def __eq__(self, other): + if not isinstance(other, ParameterizedFunction): + return NotImplemented + return self.func == other.func and self.values == other.values + + def is_parameterized(self): + for value in self.values: + if isinstance(value, Parameter): + return True + return False + + def resolve(self, bindings, used): + values = [] + for value in self.values: + if isinstance(value, Parameter): + value = value.resolve(bindings, used) + values.append(value) + + return self._func(values) + + +class Node(object): + """Base class for filter expression tree nodes. + + Tree nodes are considered immutable, even though they can contain + Parameter instances, which are not. In particular, two identical + trees may be represented by the same Node object in different + contexts. + + Raises: + TypeError: Always, only subclasses are allowed. + """ + + _multiquery = False + + def __new__(cls): + if cls is Node: + raise TypeError("Cannot instantiate Node, only a subclass.") + return super(Node, cls).__new__(cls) + + def __eq__(self, other): + raise NotImplementedError + + def __ne__(self, other): + # Python 2.7 requires this method to be implemented. + eq = self.__eq__(other) + if eq is not NotImplemented: + eq = not eq + return eq + + def __le__(self, unused_other): + raise TypeError("Nodes cannot be ordered") + + def __lt__(self, unused_other): + raise TypeError("Nodes cannot be ordered") + + def __ge__(self, unused_other): + raise TypeError("Nodes cannot be ordered") + + def __gt__(self, unused_other): + raise TypeError("Nodes cannot be ordered") + + def _to_filter(self, post=False): + """Helper to convert to low-level filter. + + Raises: + NotImplementedError: Always. This method is virtual. + """ + raise NotImplementedError + + def _post_filters(self): + """Helper to extract post-filter nodes, if any. 
+ + Returns: + None: Always. Because this is the base implementation. + """ + return None + + def resolve(self, bindings, used): + """Return a node with parameters replaced by the selected values. + + .. note:: + + Both ``bindings`` and ``used`` are unused by this base class + implementation. + + Args: + bindings (dict): A mapping of parameter bindings. + used (Dict[Union[str, int], bool]): A mapping of already used + parameters. This will be modified if the current parameter + is in ``bindings``. + + Returns: + Node: The current node. + """ + return self + + +class FalseNode(Node): + """Tree node for an always-failing filter.""" + + def __eq__(self, other): + """Equality check. + + An instance will always equal another :class:`FalseNode` instance. This + is because they hold no state. + """ + if not isinstance(other, FalseNode): + return NotImplemented + return True + + def _to_filter(self, post=False): + """(Attempt to) convert to a low-level filter instance. + + Args: + post (bool): Indicates if this is a post-filter node. + + Raises: + .BadQueryError: If ``post`` is :data:`False`, because there's no + point submitting a query that will never return anything. + """ + if post: + return None + raise exceptions.BadQueryError("Cannot convert FalseNode to predicate") + + +class ParameterNode(Node): + """Tree node for a parameterized filter. + + Args: + prop (~google.cloud.ndb.model.Property): A property describing a value + type. + op (str): The comparison operator. One of ``=``, ``!=``, ``<``, ``<=``, + ``>``, ``>=`` or ``in``. + param (ParameterizedThing): The parameter corresponding to the node. + + Raises: + TypeError: If ``prop`` is not a + :class:`~google.cloud.ndb.model.Property`. + TypeError: If ``op`` is not one of the accepted operators. + TypeError: If ``param`` is not a :class:`.Parameter` or + :class:`.ParameterizedFunction`. + """ + + def __new__(cls, prop, op, param): + # Avoid circular import in Python 2.7 + from google.cloud.ndb import model + + if not isinstance(prop, model.Property): + raise TypeError("Expected a Property, got {!r}".format(prop)) + if op not in _OPS: + raise TypeError("Expected a valid operator, got {!r}".format(op)) + if not isinstance(param, ParameterizedThing): + raise TypeError("Expected a ParameterizedThing, got {!r}".format(param)) + obj = super(ParameterNode, cls).__new__(cls) + obj._prop = prop + obj._op = op + obj._param = param + return obj + + def __getnewargs__(self): + """Private API used to specify ``__new__`` arguments when unpickling. + + .. note:: + + This method only applies if the ``pickle`` protocol is 2 or + greater. + + Returns: + Tuple[~google.cloud.ndb.model.Property, str, ParameterizedThing]: + A tuple containing the internal state: the property, operation and + parameter. + """ + return self._prop, self._op, self._param + + def __repr__(self): + return "ParameterNode({!r}, {!r}, {!r})".format( + self._prop, self._op, self._param + ) + + def __eq__(self, other): + if not isinstance(other, ParameterNode): + return NotImplemented + return ( + self._prop._name == other._prop._name + and self._op == other._op + and self._param == other._param + ) + + def _to_filter(self, post=False): + """Helper to convert to low-level filter. + + Args: + post (bool): Indicates if this is a post-filter node. + + Raises: + exceptions.BadArgumentError: Always. This is because this node represents + a parameter, i.e. no value exists to be filtered on. 
+ """ + raise exceptions.BadArgumentError( + "Parameter :{} is not bound.".format(self._param.key) + ) + + def resolve(self, bindings, used): + """Return a node with parameters replaced by the selected values. + + Args: + bindings (dict): A mapping of parameter bindings. + used (Dict[Union[str, int], bool]): A mapping of already used + parameters. + + Returns: + Union[~google.cloud.ndb.query.DisjunctionNode, \ + ~google.cloud.ndb.query.FilterNode, \ + ~google.cloud.ndb.query.FalseNode]: A node corresponding to + the value substituted. + """ + value = self._param.resolve(bindings, used) + if self._op == _IN_OP: + return self._prop._IN(value) + elif self._op == _NOT_IN_OP: + return self._prop._NOT_IN(value) + else: + return self._prop._comparison(self._op, value) + + +class FilterNode(Node): + """Tree node for a single filter expression. + + For example ``FilterNode("a", ">", 3)`` filters for entities where the + value ``a`` is greater than ``3``. + + .. warning:: + + The constructor for this type may not always return a + :class:`FilterNode`. For example: + + * The filter ``name in (value1, ..., valueN)`` is converted into + ``(name = value1) OR ... OR (name = valueN)`` (also a + :class:`DisjunctionNode`) + * The filter ``name in ()`` (i.e. a property is among an empty list + of values) is converted into a :class:`FalseNode` + * The filter ``name in (value1,)`` (i.e. a list with one element) is + converted into ``name = value1``, a related :class:`FilterNode` + with a different ``opsymbol`` and ``value`` than what was passed + to the constructor + + Args: + name (str): The name of the property being filtered. + opsymbol (str): The comparison operator. One of ``=``, ``!=``, ``<``, + ``<=``, ``>``, ``>=`` or ``in``. + value (Any): The value to filter on / relative to. + server_op (bool): Force the operator to use a server side filter. + + Raises: + TypeError: If ``opsymbol`` is ``"in"`` but ``value`` is not a + basic container (:class:`list`, :class:`tuple`, :class:`set` or + :class:`frozenset`) + """ + + _name = None + _opsymbol = None + _value = None + + def __new__(cls, name, opsymbol, value, server_op=False): + # Avoid circular import in Python 2.7 + from google.cloud.ndb import model + + if isinstance(value, model.Key): + value = value._key + + if opsymbol == _IN_OP: + if not isinstance(value, (list, tuple, set, frozenset)): + raise TypeError( + "in expected a list, tuple or set of values; " + "received {!r}".format(value) + ) + nodes = [FilterNode(name, _EQ_OP, sub_value) for sub_value in value] + if not nodes: + return FalseNode() + if len(nodes) == 1: + return nodes[0] + if not server_op: + return DisjunctionNode(*nodes) + + instance = super(FilterNode, cls).__new__(cls) + instance._name = name + instance._opsymbol = opsymbol + instance._value = value + return instance + + def __getnewargs__(self): + """Private API used to specify ``__new__`` arguments when unpickling. + + .. note:: + + This method only applies if the ``pickle`` protocol is 2 or + greater. + + Returns: + Tuple[str, str, Any]: A tuple containing the + internal state: the name, ``opsymbol`` and value. 
+ """ + return self._name, self._opsymbol, self._value + + def __repr__(self): + return "{}({!r}, {!r}, {!r})".format( + type(self).__name__, self._name, self._opsymbol, self._value + ) + + def __eq__(self, other): + if not isinstance(other, FilterNode): + return NotImplemented + + return ( + self._name == other._name + and self._opsymbol == other._opsymbol + and self._value == other._value + ) + + def _to_filter(self, post=False): + """Helper to convert to low-level filter. + + Args: + post (bool): Indicates if this is a post-filter node. + + Returns: + Optional[query_pb2.PropertyFilter]: Returns :data:`None`, if + this is a post-filter, otherwise returns the protocol buffer + representation of the filter. + """ + # Avoid circular import in Python 2.7 + from google.cloud.ndb import _datastore_query + + if post: + return None + + return _datastore_query.make_filter(self._name, self._opsymbol, self._value) + + +class PostFilterNode(Node): + """Tree node representing an in-memory filtering operation. + + This is used to represent filters that cannot be executed by the + datastore, for example a query for a structured value. + + Args: + predicate (Callable[[Any], bool]): A filter predicate that + takes a datastore entity (typically as a protobuf) and + returns :data:`True` or :data:`False` if the entity matches + the given filter. + """ + + def __new__(cls, predicate): + instance = super(PostFilterNode, cls).__new__(cls) + instance.predicate = predicate + return instance + + def __getnewargs__(self): + """Private API used to specify ``__new__`` arguments when unpickling. + + .. note:: + + This method only applies if the ``pickle`` protocol is 2 or + greater. + + Returns: + Tuple[Callable[[Any], bool],]: A tuple containing a single value, + the ``predicate`` attached to this node. + """ + return (self.predicate,) + + def __repr__(self): + return "{}({})".format(type(self).__name__, self.predicate) + + def __eq__(self, other): + if not isinstance(other, PostFilterNode): + return NotImplemented + return self is other or self.predicate == other.predicate + + def _to_filter(self, post=False): + """Helper to convert to low-level filter. + + Args: + post (bool): Indicates if this is a post-filter node. + + Returns: + Tuple[Callable[[Any], bool], None]: If this is a post-filter, this + returns the stored ``predicate``, otherwise it returns + :data:`None`. + """ + if post: + return self.predicate + else: + return None + + +class _BooleanClauses(object): + """This type will be used for symbolically performing boolean operations. + + Internally, the state will track a symbolic expression like:: + + A or (B and C) or (A and D) + + as a list of the ``OR`` components:: + + [A, B and C, A and D] + + When ``combine_or=False``, it will track ``AND`` statements as a list, + making the final simplified form of our example:: + + [[A], [B, C], [A, D]] + + Via :meth:`add_node`, we will ensure that new nodes will be correctly + combined (via ``AND`` or ``OR``) with the current expression. + + Args: + name (str): The name of the class that is tracking a + boolean expression. + combine_or (bool): Indicates if new nodes will be combined + with the current boolean expression via ``AND`` or ``OR``. + """ + + def __init__(self, name, combine_or): + self.name = name + self.combine_or = combine_or + if combine_or: + # For ``OR()`` the parts are just nodes. + self.or_parts = [] + else: + # For ``AND()`` the parts are "segments", i.e. node lists. 
+            self.or_parts = [[]]
+
+    def add_node(self, node):
+        """Update the current boolean expression.
+
+        This uses the distributive law for sets to combine as follows:
+
+        - ``(A or B or C or ...) or D`` -> ``A or B or C or ... or D``
+        - ``(A or B or C or ...) and D`` ->
+          ``(A and D) or (B and D) or (C and D) or ...``
+
+        Args:
+            node (Node): A node to add to the list of clauses.
+
+        Raises:
+            TypeError: If ``node`` is not a :class:`.Node`.
+        """
+        if not isinstance(node, Node):
+            raise TypeError(
+                "{}() expects Node instances as arguments; "
+                "received a non-Node instance {!r}".format(self.name, node)
+            )
+
+        if self.combine_or:
+            if isinstance(node, DisjunctionNode):
+                # [S1 or ... or Sn] or [A1 or ... or Am]
+                #    -> S1 or ... Sn or A1 or ... or Am
+                self.or_parts.extend(node._nodes)
+            else:
+                # [S1 or ... or Sn] or [A1]
+                #    -> S1 or ... or Sn or A1
+                self.or_parts.append(node)
+        else:
+            if isinstance(node, DisjunctionNode):
+                # [S1 or ... or Sn] and [A1 or ... or Am]
+                #    -> [S1 and A1] or ... or [Sn and A1] or
+                #           ... or [S1 and Am] or ... or [Sn and Am]
+                new_segments = []
+                for segment in self.or_parts:
+                    # ``segment`` represents ``Si``
+                    for sub_node in node:
+                        # ``sub_node`` represents ``Aj``
+                        new_segment = segment + [sub_node]
+                        new_segments.append(new_segment)
+                # Replace wholesale.
+                self.or_parts[:] = new_segments
+            elif isinstance(node, ConjunctionNode):
+                # [S1 or ... or Sn] and [A1 and ... and Am]
+                #    -> [S1 and A1 and ... and Am] or ... or
+                #           [Sn and A1 and ... and Am]
+                for segment in self.or_parts:
+                    # ``segment`` represents ``Si``
+                    segment.extend(node._nodes)
+            else:
+                # [S1 or ... or Sn] and [A1]
+                #    -> [S1 and A1] or ... or [Sn and A1]
+                for segment in self.or_parts:
+                    segment.append(node)
+
+
+class ConjunctionNode(Node):
+    """Tree node representing a boolean ``AND`` operator on multiple nodes.
+
+    .. warning::
+
+        The constructor for this type may not always return a
+        :class:`ConjunctionNode`. For example:
+
+        * If the passed in ``nodes`` has only one entry, that single node
+          will be returned by the constructor
+        * If the resulting boolean expression has an ``OR`` in it, then a
+          :class:`DisjunctionNode` will be returned; e.g.
+          ``AND(OR(A, B), C)`` becomes ``OR(AND(A, C), AND(B, C))``
+
+    Args:
+        nodes (Tuple[Node, ...]): A list of nodes to be joined.
+
+    Raises:
+        TypeError: If ``nodes`` is empty.
+        RuntimeError: If the ``nodes`` combine to an "empty" boolean
+            expression.
+    """
+
+    def __new__(cls, *nodes):
+        if not nodes:
+            raise TypeError("ConjunctionNode() requires at least one node.")
+        elif len(nodes) == 1:
+            return nodes[0]
+
+        clauses = _BooleanClauses("ConjunctionNode", combine_or=False)
+        for node in nodes:
+            clauses.add_node(node)
+
+        if not clauses.or_parts:
+            # NOTE: The original implementation returned a ``FalseNode``
+            # here but as far as I can tell this code is unreachable.
+            raise RuntimeError("Invalid boolean expression")
+
+        if len(clauses.or_parts) > 1:
+            return DisjunctionNode(
+                *[ConjunctionNode(*segment) for segment in clauses.or_parts]
+            )
+
+        instance = super(ConjunctionNode, cls).__new__(cls)
+        instance._nodes = clauses.or_parts[0]
+        return instance
+
+    def __getnewargs__(self):
+        """Private API used to specify ``__new__`` arguments when unpickling.
+
+        .. note::
+
+            This method only applies if the ``pickle`` protocol is 2 or
+            greater.
+
+        Returns:
+            Tuple[Node, ...]: The list of stored nodes, converted to a
+            :class:`tuple`.
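+
+        For example, unpickling reconstructs a conjunction by calling
+        ``ConjunctionNode(*node.__getnewargs__())``.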
+ """ + return tuple(self._nodes) + + def __iter__(self): + return iter(self._nodes) + + def __repr__(self): + all_nodes = ", ".join(map(str, self._nodes)) + return "AND({})".format(all_nodes) + + def __eq__(self, other): + if not isinstance(other, ConjunctionNode): + return NotImplemented + + return self._nodes == other._nodes + + def _to_filter(self, post=False): + """Helper to convert to low-level filter. + + Args: + post (bool): Indicates if this is a post-filter node. + + Returns: + Optional[Node]: The single or composite filter corresponding to + the pre- or post-filter nodes stored. May return :data:`None`. + """ + # Avoid circular import in Python 2.7 + from google.cloud.ndb import _datastore_query + + filters = [] + for node in self._nodes: + if isinstance(node, PostFilterNode) == post: + as_filter = node._to_filter(post=post) + if as_filter: + filters.append(as_filter) + + if not filters: + return None + if len(filters) == 1: + return filters[0] + + if post: + + def composite_and_predicate(entity_pb): + return all((filter(entity_pb) for filter in filters)) + + return composite_and_predicate + + return _datastore_query.make_composite_and_filter(filters) + + def _post_filters(self): + """Helper to extract post-filter nodes, if any. + + Filters all of the stored nodes that are :class:`PostFilterNode`. + + Returns: + Optional[Node]: One of the following: + + * :data:`None` if there are no post-filter nodes in this ``AND()`` + clause + * The single node if there is exactly one post-filter node, e.g. + if the only node in ``AND(A, B, ...)`` that is a post-filter + node is ``B`` + * The current node if every stored node a post-filter node, e.g. + if all nodes ``A, B, ...`` in ``AND(A, B, ...)`` are + post-filter nodes + * A **new** :class:`ConjunctionNode` containing the post-filter + nodes, e.g. if only ``A, C`` are post-filter nodes in + ``AND(A, B, C)``, then the returned node is ``AND(A, C)`` + """ + post_filters = [ + node for node in self._nodes if isinstance(node, PostFilterNode) + ] + if not post_filters: + return None + if len(post_filters) == 1: + return post_filters[0] + if post_filters == self._nodes: + return self + return ConjunctionNode(*post_filters) + + def resolve(self, bindings, used): + """Return a node with parameters replaced by the selected values. + + Args: + bindings (dict): A mapping of parameter bindings. + used (Dict[Union[str, int], bool]): A mapping of already used + parameters. This will be modified for each parameter found + in ``bindings``. + + Returns: + Node: The current node, if all nodes are already resolved. + Otherwise returns a modified :class:`ConjunctionNode` with + each individual node resolved. + """ + resolved_nodes = [node.resolve(bindings, used) for node in self._nodes] + if resolved_nodes == self._nodes: + return self + + return ConjunctionNode(*resolved_nodes) + + +class DisjunctionNode(Node): + """Tree node representing a boolean ``OR`` operator on multiple nodes. + + .. warning:: + + This constructor may not always return a :class:`DisjunctionNode`. + If the passed in ``nodes`` has only one entry, that single node + will be returned by the constructor. + + Args: + nodes (Tuple[Node, ...]): A list of nodes to be joined. + + Raises: + TypeError: If ``nodes`` is empty. 
+ """ + + _multiquery = True + + def __new__(cls, *nodes): + if not nodes: + raise TypeError("DisjunctionNode() requires at least one node") + elif len(nodes) == 1: + return nodes[0] + + instance = super(DisjunctionNode, cls).__new__(cls) + instance._nodes = [] + + clauses = _BooleanClauses("DisjunctionNode", combine_or=True) + for node in nodes: + clauses.add_node(node) + + instance._nodes[:] = clauses.or_parts + return instance + + def __getnewargs__(self): + """Private API used to specify ``__new__`` arguments when unpickling. + + .. note:: + + This method only applies if the ``pickle`` protocol is 2 or + greater. + + Returns: + Tuple[Node, ...]: The list of stored nodes, converted to a + :class:`tuple`. + """ + return tuple(self._nodes) + + def __iter__(self): + return iter(self._nodes) + + def __repr__(self): + all_nodes = ", ".join(map(str, self._nodes)) + return "OR({})".format(all_nodes) + + def __eq__(self, other): + if not isinstance(other, DisjunctionNode): + return NotImplemented + + return self._nodes == other._nodes + + def resolve(self, bindings, used): + """Return a node with parameters replaced by the selected values. + + Args: + bindings (dict): A mapping of parameter bindings. + used (Dict[Union[str, int], bool]): A mapping of already used + parameters. This will be modified for each parameter found + in ``bindings``. + + Returns: + Node: The current node, if all nodes are already resolved. + Otherwise returns a modified :class:`DisjunctionNode` with + each individual node resolved. + """ + resolved_nodes = [node.resolve(bindings, used) for node in self._nodes] + if resolved_nodes == self._nodes: + return self + + return DisjunctionNode(*resolved_nodes) + + +# AND and OR are preferred aliases for these. +AND = ConjunctionNode +OR = DisjunctionNode + + +def _query_options(wrapped): + """A decorator for functions with query arguments for arguments. + + Many methods of :class:`Query` all take more or less the same arguments + from which they need to create a :class:`QueryOptions` instance following + the same somewhat complicated rules. + + This decorator wraps these methods with a function that does this + processing for them and passes in a :class:`QueryOptions` instance using + the ``_options`` argument to those functions, bypassing all of the + other arguments. + """ + # If there are any positional arguments, get their names. + # inspect.signature is not available in Python 2.7, so we use the + # arguments obtained with inspect.getarspec, which come from the + # positional decorator used with all query_options decorated methods. + arg_names = getattr(wrapped, "_positional_names", []) + positional = [arg for arg in arg_names if arg != "self"] + + # Provide dummy values for positional args to avoid TypeError + dummy_args = [None for _ in positional] + + @functools.wraps(wrapped) + def wrapper(self, *args, **kwargs): + # Avoid circular import in Python 2.7 + from google.cloud.ndb import context as context_module + from google.cloud.ndb import _datastore_api + + # Maybe we already did this (in the case of X calling X_async) + if "_options" in kwargs: + return wrapped(self, *dummy_args, _options=kwargs["_options"]) + + # Transfer any positional args to keyword args, so they're all in the + # same structure. 
+ for name, value in zip(positional, args): + if name in kwargs: + raise TypeError( + "{}() got multiple values for argument '{}'".format( + wrapped.__name__, name + ) + ) + kwargs[name] = value + + options = kwargs.pop("options", None) + if options is not None: + _log.warning( + "Deprecation warning: passing 'options' to 'Query' methods is " + "deprecated. Please pass arguments directly." + ) + + projection = kwargs.get("projection") + if projection: + projection = _to_property_names(projection) + _check_properties(self.kind, projection) + kwargs["projection"] = projection + + if kwargs.get("keys_only"): + if kwargs.get("projection"): + raise TypeError("Cannot specify 'projection' with 'keys_only=True'") + kwargs["projection"] = ["__key__"] + del kwargs["keys_only"] + + if kwargs.get("transaction"): + read_consistency = kwargs.pop( + "read_consistency", kwargs.pop("read_policy", None) + ) + if read_consistency == _datastore_api.EVENTUAL: + raise TypeError( + "Can't use 'transaction' with 'read_policy=ndb.EVENTUAL'" + ) + + # The 'page_size' arg for 'fetch_page' can just be translated to + # 'limit' + page_size = kwargs.pop("page_size", None) + if page_size: + kwargs["limit"] = page_size + + # Get arguments for QueryOptions attributes + query_arguments = { + name: self._option(name, kwargs.pop(name, None), options) + for name in QueryOptions.slots() + } + + # Any left over kwargs don't actually correspond to slots in + # QueryOptions, but should be left to the QueryOptions constructor to + # sort out. Some might be synonyms or shorthand for other options. + query_arguments.update(kwargs) + + context = context_module.get_context() + query_options = QueryOptions(context=context, **query_arguments) + + return wrapped(self, *dummy_args, _options=query_options) + + return wrapper + + +class QueryOptions(_options.ReadOptions): + __slots__ = ( + # Query options + "kind", + "ancestor", + "filters", + "order_by", + "orders", + "distinct_on", + "group_by", + "namespace", + "project", + "database", + # Fetch options + "keys_only", + "limit", + "offset", + "start_cursor", + "end_cursor", + # Both (!?!) + "projection", + # Map only + "callback", + ) + + def __init__(self, config=None, context=None, **kwargs): + if kwargs.get("batch_size"): + raise exceptions.NoLongerImplementedError() + + if kwargs.get("prefetch_size"): + raise exceptions.NoLongerImplementedError() + + if kwargs.get("pass_batch_into_callback"): + raise exceptions.NoLongerImplementedError() + + if kwargs.get("merge_future"): + raise exceptions.NoLongerImplementedError() + + if kwargs.pop("produce_cursors", None): + _log.warning( + "Deprecation warning: 'produce_cursors' is deprecated. " + "Cursors are always produced when available. This option is " + "ignored." + ) + + super(QueryOptions, self).__init__(config=config, **kwargs) + + if context: + if not self.project: + self.project = context.client.project + + # We always use the client's database, for consistency with python-datastore + self.database = context.client.database + + if self.namespace is None: + if self.ancestor is None: + self.namespace = context.get_namespace() + else: + self.namespace = self.ancestor.namespace() + + +class Query(object): + """Query object. + + Args: + kind (str): The kind of entities to be queried. + filters (FilterNode): Node representing a filter expression tree. + ancestor (key.Key): Entities returned will be descendants of + `ancestor`. 
+        order_by (list[Union[str, google.cloud.ndb.model.Property]]): The
+            model properties used to order query results.
+        orders (list[Union[str, google.cloud.ndb.model.Property]]):
+            Deprecated. Synonym for `order_by`.
+        project (str): The project to perform the query in. Also known as the
+            app, in Google App Engine. If not passed, uses the client's value.
+        app (str): Deprecated. Synonym for `project`.
+        namespace (str): The namespace to which to restrict results.
+            If not passed, uses the client's value.
+        projection (list[Union[str, google.cloud.ndb.model.Property]]): The
+            fields to return as part of the query results.
+        keys_only (bool): Return keys instead of entities.
+        offset (int): Number of query results to skip.
+        limit (Optional[int]): Maximum number of query results to return.
+            If not specified, there is no limit.
+        distinct_on (list[str]): The field names used to group query
+            results.
+        group_by (list[str]): Deprecated. Synonym for distinct_on.
+        default_options (QueryOptions): Deprecated. QueryOptions object.
+            Prefer passing explicit keyword arguments to the relevant method
+            directly.
+
+    Raises:
+        TypeError: If any of the arguments are invalid.
+    """
+
+    def __init__(
+        self,
+        kind=None,
+        filters=None,
+        ancestor=None,
+        order_by=None,
+        orders=None,
+        project=None,
+        app=None,
+        namespace=None,
+        projection=None,
+        distinct_on=None,
+        group_by=None,
+        limit=None,
+        offset=None,
+        keys_only=None,
+        default_options=None,
+    ):
+        # Avoid circular import in Python 2.7
+        from google.cloud.ndb import model
+
+        self.default_options = None
+
+        if app:
+            if project:
+                raise TypeError(
+                    "Cannot use both app and project, they are synonyms. app "
+                    "is deprecated."
+                )
+            project = app
+
+        if default_options is not None:
+            _log.warning(
+                "Deprecation warning: passing default_options to the Query "
+                "constructor is deprecated. Please directly pass any "
+                "arguments you want to use to the Query constructor or its "
+                "methods."
+            )
+
+            if not isinstance(default_options, QueryOptions):
+                raise TypeError(
+                    "default_options must be QueryOptions or None; "
+                    "received {}".format(default_options)
+                )
+
+            # Not sure why we're doing all this checking just for this one
+            # option.
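+            # (The check rejects passing ``projection`` both directly and
+            # via ``default_options`` at the same time.)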
+            if projection is not None:
+                if getattr(default_options, "projection", None) is not None:
+                    raise TypeError(
+                        "cannot use projection keyword argument and "
+                        "default_options.projection at the same time"
+                    )
+
+            self.default_options = default_options
+            kind = self._option("kind", kind)
+            filters = self._option("filters", filters)
+            ancestor = self._option("ancestor", ancestor)
+            order_by = self._option("order_by", order_by)
+            orders = self._option("orders", orders)
+            project = self._option("project", project)
+            app = self._option("app", app)
+            namespace = self._option("namespace", namespace)
+            projection = self._option("projection", projection)
+            distinct_on = self._option("distinct_on", distinct_on)
+            group_by = self._option("group_by", group_by)
+            limit = self._option("limit", limit)
+            offset = self._option("offset", offset)
+            keys_only = self._option("keys_only", keys_only)
+
+        # Except in the case of ancestor queries, we always use the client's
+        # database
+        database = context_module.get_context().client.database or None
+
+        if ancestor is not None:
+            if isinstance(ancestor, ParameterizedThing):
+                if isinstance(ancestor, ParameterizedFunction):
+                    if ancestor.func != "key":
+                        raise TypeError(
+                            "ancestor cannot be a GQL function "
+                            "other than Key"
+                        )
+            else:
+                if not isinstance(ancestor, model.Key):
+                    raise TypeError(
+                        "ancestor must be a Key; "
+                        "received {}".format(ancestor)
+                    )
+                if not ancestor.id():
+                    raise ValueError("ancestor cannot be an incomplete key")
+                if project is not None:
+                    if project != ancestor.app():
+                        raise TypeError("ancestor/project id mismatch")
+                else:
+                    project = ancestor.app()
+
+                database = ancestor.database()
+
+                if namespace is not None:
+                    # if namespace is the empty string, that means default
+                    # namespace, but after a put, if the ancestor is using
+                    # the default namespace, its namespace will be None,
+                    # so skip the test to avoid a false mismatch error.
+                    if namespace == "" and ancestor.namespace() is None:
+                        pass
+                    elif namespace != ancestor.namespace():
+                        raise TypeError("ancestor/namespace mismatch")
+                else:
+                    namespace = ancestor.namespace()
+
+        if filters is not None:
+            if not isinstance(filters, Node):
+                raise TypeError(
+                    "filters must be a query Node or None; "
+                    "received {}".format(filters)
+                )
+        if order_by is not None and orders is not None:
+            raise TypeError(
+                "Cannot use both orders and order_by, they are synonyms "
+                "(orders is deprecated now)"
+            )
+        if order_by is None:
+            order_by = orders
+        if order_by is not None:
+            if not isinstance(order_by, (list, tuple)):
+                raise TypeError(
+                    "order must be a list, a tuple or None; "
+                    "received {}".format(order_by)
+                )
+            order_by = self._to_property_orders(order_by)
+
+        self.kind = kind
+        self.ancestor = ancestor
+        self.filters = filters
+        self.order_by = order_by
+        self.project = project
+        self.database = database
+        self.namespace = namespace
+        self.limit = limit
+        self.offset = offset
+        self.keys_only = keys_only
+
+        self.projection = None
+        if projection is not None:
+            if not projection:
+                raise TypeError("projection argument cannot be empty")
+            if not isinstance(projection, (tuple, list)):
+                raise TypeError(
+                    "projection must be a tuple, list or None; "
+                    "received {}".format(projection)
+                )
+            projection = _to_property_names(projection)
+            _check_properties(self.kind, projection)
+            self.projection = tuple(projection)
+
+        if distinct_on is not None and group_by is not None:
+            raise TypeError(
+                "Cannot use both group_by and distinct_on, they are synonyms. "
+                "group_by is deprecated."
+ ) + if distinct_on is None: + distinct_on = group_by + + self.distinct_on = None + if distinct_on is not None: + if not distinct_on: + raise TypeError("distinct_on argument cannot be empty") + if not isinstance(distinct_on, (tuple, list)): + raise TypeError( + "distinct_on must be a tuple, list or None; " + "received {}".format(distinct_on) + ) + distinct_on = _to_property_names(distinct_on) + _check_properties(self.kind, distinct_on) + self.distinct_on = tuple(distinct_on) + + def __repr__(self): + args = [] + if self.project is not None: + args.append("project=%r" % self.project) + if self.namespace is not None: + args.append("namespace=%r" % self.namespace) + if self.kind is not None: + args.append("kind=%r" % self.kind) + if self.ancestor is not None: + args.append("ancestor=%r" % self.ancestor) + if self.filters is not None: + args.append("filters=%r" % self.filters) + if self.order_by is not None: + args.append("order_by=%r" % self.order_by) + if self.limit is not None: + args.append("limit=%r" % self.limit) + if self.offset is not None: + args.append("offset=%r" % self.offset) + if self.keys_only is not None: + args.append("keys_only=%r" % self.keys_only) + if self.projection: + args.append("projection=%r" % (_to_property_names(self.projection))) + if self.distinct_on: + args.append("distinct_on=%r" % (_to_property_names(self.distinct_on))) + if self.default_options is not None: + args.append("default_options=%r" % self.default_options) + return "%s(%s)" % (self.__class__.__name__, ", ".join(args)) + + @property + def is_distinct(self): + """True if results are guaranteed to contain a unique set of property + values. + + This happens when every property in distinct_on is also in projection. + """ + return bool( + self.distinct_on + and set(_to_property_names(self.distinct_on)) + <= set(_to_property_names(self.projection)) + ) + + def filter(self, *filters): + """Return a new Query with additional filter(s) applied. + + Args: + filters (list[Node]): One or more instances of Node. + + Returns: + Query: A new query with the new filters applied. + + Raises: + TypeError: If one of the filters is not a Node. + """ + if not filters: + return self + new_filters = [] + if self.filters: + new_filters.append(self.filters) + for filter in filters: + if not isinstance(filter, Node): + raise TypeError( + "Cannot filter a non-Node argument; received %r" % filter + ) + new_filters.append(filter) + if len(new_filters) == 1: + new_filters = new_filters[0] + else: + new_filters = ConjunctionNode(*new_filters) + return self.__class__( + kind=self.kind, + ancestor=self.ancestor, + filters=new_filters, + order_by=self.order_by, + project=self.project, + namespace=self.namespace, + default_options=self.default_options, + projection=self.projection, + distinct_on=self.distinct_on, + limit=self.limit, + offset=self.offset, + keys_only=self.keys_only, + ) + + def order(self, *props): + """Return a new Query with additional sort order(s) applied. + + Args: + props (list[Union[str, google.cloud.ndb.model.Property]]): One or + more model properties to sort by. + + Returns: + Query: A new query with the new order applied. 
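+
+        For example, with a hypothetical ``Account`` model that has ``name``
+        and ``age`` properties, results can be sorted by age descending and
+        then by name::
+
+            query = Account.query().order(-Account.age, "name")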
+ """ + if not props: + return self + property_orders = self._to_property_orders(props) + order_by = self.order_by + if order_by is None: + order_by = property_orders + else: + order_by.extend(property_orders) + return self.__class__( + kind=self.kind, + ancestor=self.ancestor, + filters=self.filters, + order_by=order_by, + project=self.project, + namespace=self.namespace, + default_options=self.default_options, + projection=self.projection, + distinct_on=self.distinct_on, + limit=self.limit, + offset=self.offset, + keys_only=self.keys_only, + ) + + def analyze(self): + """Return a list giving the parameters required by a query. + + When a query is created using gql, any bound parameters + are created as ParameterNode instances. This method returns + the names of any such parameters. + + Returns: + list[str]: required parameter names. + """ + + class MockBindings(dict): + def __contains__(self, key): + self[key] = None + return True + + bindings = MockBindings() + used = {} + ancestor = self.ancestor + if isinstance(ancestor, ParameterizedThing): + ancestor = ancestor.resolve(bindings, used) + filters = self.filters + if filters is not None: + filters = filters.resolve(bindings, used) + return sorted(used) # Returns only the keys. + + def bind(self, *positional, **keyword): + """Bind parameter values. Returns a new Query object. + + When a query is created using gql, any bound parameters + are created as ParameterNode instances. This method + receives values for both positional (:1, :2, etc.) or + keyword (:something, :other, etc.) bound parameters, then sets the + values accordingly. This mechanism allows easy reuse of a + parameterized query, by passing the values to bind here. + + Args: + positional (list[Any]): One or more positional values to bind. + keyword (dict[Any]): One or more keyword values to bind. + + Returns: + Query: A new query with the new bound parameter values. + + Raises: + google.cloud.ndb.exceptions.BadArgumentError: If one of + the positional parameters is not used in the query. + """ + bindings = dict(keyword) + for i, arg in enumerate(positional): + bindings[i + 1] = arg + used = {} + ancestor = self.ancestor + if isinstance(ancestor, ParameterizedThing): + ancestor = ancestor.resolve(bindings, used) + filters = self.filters + if filters is not None: + filters = filters.resolve(bindings, used) + unused = [] + for i, arg in enumerate(positional): + if i + 1 not in used: + unused.append(i + 1) + if unused: + raise exceptions.BadArgumentError( + "Positional arguments %s were given but not used." 
+ % ", ".join(str(i) for i in unused) + ) + return self.__class__( + kind=self.kind, + ancestor=ancestor, + filters=filters, + order_by=self.order_by, + project=self.project, + namespace=self.namespace, + default_options=self.default_options, + projection=self.projection, + distinct_on=self.distinct_on, + limit=self.limit, + offset=self.offset, + keys_only=self.keys_only, + ) + + def _to_property_orders(self, order_by): + # Avoid circular import in Python 2.7 + from google.cloud.ndb import model + + orders = [] + for order in order_by: + if isinstance(order, PropertyOrder): + # if a negated property, will already be a PropertyOrder + orders.append(order) + elif isinstance(order, model.Property): + # use the sign to turn it into a PropertyOrder + orders.append(+order) + elif isinstance(order, str): + name = order + reverse = False + if order.startswith("-"): + name = order[1:] + reverse = True + property_order = PropertyOrder(name, reverse=reverse) + orders.append(property_order) + else: + raise TypeError("Order values must be properties or strings") + return orders + + @_query_options + @utils.keyword_only( + keys_only=None, + projection=None, + offset=None, + batch_size=None, + prefetch_size=None, + produce_cursors=False, + start_cursor=None, + end_cursor=None, + timeout=None, + deadline=None, + read_consistency=None, + read_policy=None, + transaction=None, + options=None, + _options=None, + ) + @utils.positional(2) + def fetch(self, limit=None, **kwargs): + """Run a query, fetching results. + + Args: + keys_only (bool): Return keys instead of entities. + projection (list[Union[str, google.cloud.ndb.model.Property]]): The + fields to return as part of the query results. + offset (int): Number of query results to skip. + limit (Optional[int]): Maximum number of query results to return. + If not specified, there is no limit. + batch_size: DEPRECATED: No longer implemented. + prefetch_size: DEPRECATED: No longer implemented. + produce_cursors: Ignored. Cursors always produced if available. + start_cursor: Starting point for search. + end_cursor: Endpoint point for search. + timeout (Optional[int]): Override the gRPC timeout, in seconds. + deadline (Optional[int]): DEPRECATED: Synonym for ``timeout``. + read_consistency: If set then passes the explicit read consistency to + the server. May not be set to ``ndb.EVENTUAL`` when a transaction + is specified. + read_policy: DEPRECATED: Synonym for ``read_consistency``. + transaction (bytes): Transaction ID to use for query. Results will + be consistent with Datastore state for that transaction. + Implies ``read_policy=ndb.STRONG``. + options (QueryOptions): DEPRECATED: An object containing options + values for some of these arguments. + + Returns: + List[Union[model.Model, key.Key]]: The query results. + """ + return self.fetch_async(_options=kwargs["_options"]).result() + + @_query_options + @utils.keyword_only( + keys_only=None, + projection=None, + offset=None, + batch_size=None, + prefetch_size=None, + produce_cursors=False, + start_cursor=None, + end_cursor=None, + timeout=None, + deadline=None, + read_consistency=None, + read_policy=None, + transaction=None, + options=None, + _options=None, + ) + @utils.positional(2) + def fetch_async(self, limit=None, **kwargs): + """Run a query, asynchronously fetching the results. + + Args: + keys_only (bool): Return keys instead of entities. + projection (list[Union[str, google.cloud.ndb.model.Property]]): The + fields to return as part of the query results. 
+ offset (int): Number of query results to skip. + limit (Optional[int]): Maximum number of query results to return. + If not specified, there is no limit. + batch_size: DEPRECATED: No longer implemented. + prefetch_size: DEPRECATED: No longer implemented. + produce_cursors: Ignored. Cursors always produced if available. + start_cursor: Starting point for search. + end_cursor: Endpoint point for search. + timeout (Optional[int]): Override the gRPC timeout, in seconds. + deadline (Optional[int]): DEPRECATED: Synonym for ``timeout``. + read_consistency: If set then passes the explicit read consistency to + the server. May not be set to ``ndb.EVENTUAL`` when a transaction + is specified. + read_policy: DEPRECATED: Synonym for ``read_consistency``. + transaction (bytes): Transaction ID to use for query. Results will + be consistent with Datastore state for that transaction. + Implies ``read_policy=ndb.STRONG``. + options (QueryOptions): DEPRECATED: An object containing options + values for some of these arguments. + + Returns: + tasklets.Future: Eventual result will be a List[model.Model] of the + results. + """ + # Avoid circular import in Python 2.7 + from google.cloud.ndb import _datastore_query + + return _datastore_query.fetch(kwargs["_options"]) + + def _option(self, name, given, options=None): + """Get given value or a provided default for an option. + + Precedence is given first to the `given` value, then any value passed + in with `options`, then any value that is already set on this query, + and, lastly, any default value in `default_options` if provided to the + :class:`Query` constructor. + + This attempts to reconcile, in as rational a way possible, all the + different ways of passing the same option to a query established by + legacy NDB. Because of the absurd amount of complexity involved, + `QueryOptions` is deprecated in favor of just passing arguments + directly to the `Query` constructor or its methods. + + Args: + name (str): Name of the option. + given (Any): The given value for the option. + options (Optional[QueryOptions]): An object containing option + values. + + Returns: + Any: Either the given value or a provided default. + """ + if given is not None: + return given + + if options is not None: + value = getattr(options, name, None) + if value is not None: + return value + + value = getattr(self, name, None) + if value is not None: + return value + + if self.default_options is not None: + return getattr(self.default_options, name, None) + + return None + + def run_to_queue(self, queue, conn, options=None, dsquery=None): + """Run this query, putting entities into the given queue.""" + raise exceptions.NoLongerImplementedError() + + @_query_options + @utils.keyword_only( + keys_only=None, + limit=None, + projection=None, + offset=None, + batch_size=None, + prefetch_size=None, + produce_cursors=False, + start_cursor=None, + end_cursor=None, + timeout=None, + deadline=None, + read_consistency=None, + read_policy=None, + transaction=None, + options=None, + _options=None, + ) + @utils.positional(1) + def iter(self, **kwargs): + """Get an iterator over query results. + + Args: + keys_only (bool): Return keys instead of entities. + limit (Optional[int]): Maximum number of query results to return. + If not specified, there is no limit. + projection (list[str]): The fields to return as part of the query + results. + offset (int): Number of query results to skip. + batch_size: DEPRECATED: No longer implemented. + prefetch_size: DEPRECATED: No longer implemented. 
+            produce_cursors: Ignored. Cursors always produced if available.
+            start_cursor: Starting point for search.
+            end_cursor: Endpoint point for search.
+            timeout (Optional[int]): Override the gRPC timeout, in seconds.
+            deadline (Optional[int]): DEPRECATED: Synonym for ``timeout``.
+            read_consistency: If set then passes the explicit read consistency to
+                the server. May not be set to ``ndb.EVENTUAL`` when a transaction
+                is specified.
+            read_policy: DEPRECATED: Synonym for ``read_consistency``.
+            transaction (bytes): Transaction ID to use for query. Results will
+                be consistent with Datastore state for that transaction.
+                Implies ``read_policy=ndb.STRONG``.
+            options (QueryOptions): DEPRECATED: An object containing options
+                values for some of these arguments.
+
+        Returns:
+            :class:`QueryIterator`: An iterator.
+        """
+        # Avoid circular import in Python 2.7
+        from google.cloud.ndb import _datastore_query
+
+        return _datastore_query.iterate(kwargs["_options"])
+
+    __iter__ = iter
+
+    @_query_options
+    @utils.keyword_only(
+        keys_only=None,
+        limit=None,
+        projection=None,
+        offset=None,
+        batch_size=None,
+        prefetch_size=None,
+        produce_cursors=False,
+        start_cursor=None,
+        end_cursor=None,
+        timeout=None,
+        deadline=None,
+        read_consistency=None,
+        read_policy=None,
+        transaction=None,
+        options=None,
+        pass_batch_into_callback=None,
+        merge_future=None,
+        _options=None,
+    )
+    @utils.positional(2)
+    def map(self, callback, **kwargs):
+        """Map a callback function or tasklet over the query results.
+
+        Args:
+            callback (Callable): A function or tasklet to be applied to each
+                result; see below.
+            keys_only (bool): Return keys instead of entities.
+            projection (list[str]): The fields to return as part of the query
+                results.
+            offset (int): Number of query results to skip.
+            limit (Optional[int]): Maximum number of query results to return.
+                If not specified, there is no limit.
+            batch_size: DEPRECATED: No longer implemented.
+            prefetch_size: DEPRECATED: No longer implemented.
+            produce_cursors: Ignored. Cursors always produced if available.
+            start_cursor: Starting point for search.
+            end_cursor: Endpoint point for search.
+            timeout (Optional[int]): Override the gRPC timeout, in seconds.
+            deadline (Optional[int]): DEPRECATED: Synonym for ``timeout``.
+            read_consistency: If set then passes the explicit read consistency to
+                the server. May not be set to ``ndb.EVENTUAL`` when a transaction
+                is specified.
+            read_policy: DEPRECATED: Synonym for ``read_consistency``.
+            transaction (bytes): Transaction ID to use for query. Results will
+                be consistent with Datastore state for that transaction.
+                Implies ``read_policy=ndb.STRONG``.
+            options (QueryOptions): DEPRECATED: An object containing options
+                values for some of these arguments.
+            pass_batch_into_callback: DEPRECATED: No longer implemented.
+            merge_future: DEPRECATED: No longer implemented.
+
+        Callback signature: The callback is normally called with an entity as
+        argument. However if keys_only=True is given, it is called with a Key.
+        The callback can return whatever it wants.
+
+        Returns:
+            Any: When the query has run to completion and all callbacks have
+                returned, map() returns a list of the results of all callbacks.
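+
+        For example, with a hypothetical ``Account`` model that has an
+        ``email`` property, the addresses of all accounts can be gathered
+        with::
+
+            emails = Account.query().map(lambda account: account.email)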
+ """ + return self.map_async(None, _options=kwargs["_options"]).result() + + @tasklets.tasklet + @_query_options + @utils.keyword_only( + keys_only=None, + limit=None, + projection=None, + offset=None, + batch_size=None, + prefetch_size=None, + produce_cursors=False, + start_cursor=None, + end_cursor=None, + timeout=None, + deadline=None, + read_consistency=None, + read_policy=None, + transaction=None, + options=None, + pass_batch_into_callback=None, + merge_future=None, + _options=None, + ) + @utils.positional(2) + def map_async(self, callback, **kwargs): + """Map a callback function or tasklet over the query results. + + This is the asynchronous version of :meth:`Query.map`. + + Returns: + tasklets.Future: See :meth:`Query.map` for eventual result. + """ + # Avoid circular import in Python 2.7 + from google.cloud.ndb import _datastore_query + + _options = kwargs["_options"] + callback = _options.callback + futures = [] + results = _datastore_query.iterate(_options) + while (yield results.has_next_async()): + result = results.next() + mapped = callback(result) + if not isinstance(mapped, tasklets.Future): + future = tasklets.Future() + future.set_result(mapped) + mapped = future + futures.append(mapped) + + if futures: + mapped_results = yield futures + else: + mapped_results = () + + raise tasklets.Return(mapped_results) + + @_query_options + @utils.keyword_only( + keys_only=None, + projection=None, + batch_size=None, + prefetch_size=None, + produce_cursors=False, + start_cursor=None, + end_cursor=None, + timeout=None, + deadline=None, + read_consistency=None, + read_policy=None, + transaction=None, + options=None, + _options=None, + ) + @utils.positional(1) + def get(self, **kwargs): + """Get the first query result, if any. + + This is equivalent to calling ``q.fetch(1)`` and returning the first + result, if any. + + Args: + keys_only (bool): Return keys instead of entities. + projection (list[str]): The fields to return as part of the query + results. + batch_size: DEPRECATED: No longer implemented. + prefetch_size: DEPRECATED: No longer implemented. + produce_cursors: Ignored. Cursors always produced if available. + start_cursor: Starting point for search. + end_cursor: Endpoint point for search. + timeout (Optional[int]): Override the gRPC timeout, in seconds. + deadline (Optional[int]): DEPRECATED: Synonym for ``timeout``. + read_consistency: If set then passes the explicit read consistency to + the server. May not be set to ``ndb.EVENTUAL`` when a transaction + is specified. + read_policy: DEPRECATED: Synonym for ``read_consistency``. + transaction (bytes): Transaction ID to use for query. Results will + be consistent with Datastore state for that transaction. + Implies ``read_policy=ndb.STRONG``. + options (QueryOptions): DEPRECATED: An object containing options + values for some of these arguments. + + Returns: + Optional[Union[google.cloud.datastore.entity.Entity, key.Key]]: + A single result, or :data:`None` if there are no results. + """ + return self.get_async(_options=kwargs["_options"]).result() + + @tasklets.tasklet + @_query_options + @utils.keyword_only( + keys_only=None, + projection=None, + offset=None, + batch_size=None, + prefetch_size=None, + produce_cursors=False, + start_cursor=None, + end_cursor=None, + timeout=None, + deadline=None, + read_consistency=None, + read_policy=None, + transaction=None, + options=None, + _options=None, + ) + @utils.positional(1) + def get_async(self, **kwargs): + """Get the first query result, if any. 
+
+        This is the asynchronous version of :meth:`Query.get`.
+
+        Returns:
+            tasklets.Future: See :meth:`Query.get` for eventual result.
+        """
+        # Avoid circular import in Python 2.7
+        from google.cloud.ndb import _datastore_query
+
+        options = kwargs["_options"].copy(limit=1)
+        results = yield _datastore_query.fetch(options)
+        if results:
+            raise tasklets.Return(results[0])
+
+    @_query_options
+    @utils.keyword_only(
+        offset=None,
+        batch_size=None,
+        prefetch_size=None,
+        produce_cursors=False,
+        start_cursor=None,
+        end_cursor=None,
+        timeout=None,
+        deadline=None,
+        read_consistency=None,
+        read_policy=None,
+        transaction=None,
+        options=None,
+        _options=None,
+    )
+    @utils.positional(2)
+    def count(self, limit=None, **kwargs):
+        """Count the number of query results, up to a limit.
+
+        This returns the same result as ``len(q.fetch(limit))``.
+
+        Note that you should pass a maximum value to limit the amount of
+        work done by the query.
+
+        Note:
+            The legacy GAE version of NDB claims this is more efficient than
+            just calling ``len(q.fetch(limit))``. Since Datastore does not
+            provide an API for ``count``, this version ends up performing the
+            fetch under the hood. We can specify ``keys_only`` to save some
+            network traffic, making this call really equivalent to
+            ``len(q.fetch(limit, keys_only=True))``. We can also avoid
+            marshalling NDB key objects from the returned protocol buffers,
+            but this is a minor savings--most applications that use NDB will
+            have their performance bound by the Datastore backend, not the
+            CPU. Generally, any claim of performance improvement using this
+            versus the equivalent call to ``fetch`` is exaggerated, at best.
+
+        Args:
+            limit (Optional[int]): Maximum number of query results to return.
+                If not specified, there is no limit.
+            projection (list[str]): The fields to return as part of the query
+                results.
+            offset (int): Number of query results to skip.
+            batch_size: DEPRECATED: No longer implemented.
+            prefetch_size: DEPRECATED: No longer implemented.
+            produce_cursors: Ignored. Cursors always produced if available.
+            start_cursor: Starting point for search.
+            end_cursor: Endpoint point for search.
+            timeout (Optional[int]): Override the gRPC timeout, in seconds.
+            deadline (Optional[int]): DEPRECATED: Synonym for ``timeout``.
+            read_consistency: If set then passes the explicit read consistency to
+                the server. May not be set to ``ndb.EVENTUAL`` when a transaction
+                is specified.
+            read_policy: DEPRECATED: Synonym for ``read_consistency``.
+            transaction (bytes): Transaction ID to use for query. Results will
+                be consistent with Datastore state for that transaction.
+                Implies ``read_policy=ndb.STRONG``.
+            options (QueryOptions): DEPRECATED: An object containing options
+                values for some of these arguments.
+
+        Returns:
+            int: The number of query results, up to the limit.
+        """
+        return self.count_async(_options=kwargs["_options"]).result()
+
+    @_query_options
+    @utils.keyword_only(
+        offset=None,
+        batch_size=None,
+        prefetch_size=None,
+        produce_cursors=False,
+        start_cursor=None,
+        end_cursor=None,
+        timeout=None,
+        deadline=None,
+        read_consistency=None,
+        read_policy=None,
+        transaction=None,
+        options=None,
+        _options=None,
+    )
+    @utils.positional(2)
+    def count_async(self, limit=None, **kwargs):
+        """Count the number of query results, up to a limit.
+
+        This is the asynchronous version of :meth:`Query.count`.
+
+        Returns:
+            tasklets.Future: See :meth:`Query.count` for eventual result.
+        """
+        # Avoid circular import in Python 2.7
+        from google.cloud.ndb import _datastore_query
+
+        return _datastore_query.count(kwargs["_options"])
+
+    @_query_options
+    @utils.keyword_only(
+        keys_only=None,
+        projection=None,
+        batch_size=None,
+        prefetch_size=None,
+        produce_cursors=False,
+        start_cursor=None,
+        end_cursor=None,
+        timeout=None,
+        deadline=None,
+        read_consistency=None,
+        read_policy=None,
+        transaction=None,
+        options=None,
+        _options=None,
+    )
+    @utils.positional(2)
+    def fetch_page(self, page_size, **kwargs):
+        """Fetch a page of results.
+
+        This is a specialized method for use by paging user interfaces.
+
+        To fetch the next page, you pass the cursor returned by one call to the
+        next call using the `start_cursor` argument. A common idiom is to pass
+        the cursor to the client using :meth:`_datastore_query.Cursor.urlsafe`
+        and to reconstruct that cursor on a subsequent request using the
+        `urlsafe` argument to :class:`_datastore_query.Cursor`.
+
+        NOTE:
+            This method relies on cursors which are not available for queries
+            that involve ``OR``, ``!=``, ``IN`` operators. This feature is not
+            available for those queries.
+
+        Args:
+            page_size (int): The number of results per page. At most, this
+                many results will be returned.
+            keys_only (bool): Return keys instead of entities.
+            projection (list[str]): The fields to return as part of the query
+                results.
+            batch_size: DEPRECATED: No longer implemented.
+            prefetch_size: DEPRECATED: No longer implemented.
+            produce_cursors: Ignored. Cursors always produced if available.
+            start_cursor: Starting point for search.
+            end_cursor: Endpoint point for search.
+            timeout (Optional[int]): Override the gRPC timeout, in seconds.
+            deadline (Optional[int]): DEPRECATED: Synonym for ``timeout``.
+            read_consistency: If set then passes the explicit read consistency to
+                the server. May not be set to ``ndb.EVENTUAL`` when a transaction
+                is specified.
+            read_policy: DEPRECATED: Synonym for ``read_consistency``.
+            transaction (bytes): Transaction ID to use for query. Results will
+                be consistent with Datastore state for that transaction.
+                Implies ``read_policy=ndb.STRONG``.
+            options (QueryOptions): DEPRECATED: An object containing options
+                values for some of these arguments.
+
+        Returns:
+            Tuple[list, _datastore_query.Cursor, bool]: A tuple
+                `(results, cursor, more)` where `results` is a list of query
+                results, `cursor` is a cursor pointing just after the last
+                result returned, and `more` indicates whether there are
+                (likely) more results after that.
+        """
+        return self.fetch_page_async(None, _options=kwargs["_options"]).result()
+
+    @tasklets.tasklet
+    @_query_options
+    @utils.keyword_only(
+        keys_only=None,
+        projection=None,
+        batch_size=None,
+        prefetch_size=None,
+        produce_cursors=False,
+        start_cursor=None,
+        end_cursor=None,
+        timeout=None,
+        deadline=None,
+        read_consistency=None,
+        read_policy=None,
+        transaction=None,
+        options=None,
+        _options=None,
+    )
+    @utils.positional(2)
+    def fetch_page_async(self, page_size, **kwargs):
+        """Fetch a page of results.
+
+        This is the asynchronous version of :meth:`Query.fetch_page`.
+
+        Returns:
+            tasklets.Future: See :meth:`Query.fetch_page` for eventual result.
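+
+        A paging loop using the synchronous version might look like this
+        (a sketch; ``query`` and ``PAGE_SIZE`` are assumed to be defined)::
+
+            results, cursor, more = query.fetch_page(PAGE_SIZE)
+            while more:
+                results, cursor, more = query.fetch_page(
+                    PAGE_SIZE, start_cursor=cursor
+                )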
+ """ + # Avoid circular import in Python 2.7 + from google.cloud.ndb import _datastore_query + + _options = kwargs["_options"] + if _options.filters: + if _options.filters._multiquery: + raise TypeError( + "Can't use 'fetch_page' or 'fetch_page_async' with query " + "that uses 'OR', '!=', or 'IN'." + ) + + iterator = _datastore_query.iterate(_options, raw=True) + results = [] + cursor = None + while (yield iterator.has_next_async()): + result = iterator.next() + results.append(result.entity()) + cursor = result.cursor + + more = bool(results) and ( + iterator._more_results_after_limit or iterator.probably_has_next() + ) + raise tasklets.Return(results, cursor, more) + + +def gql(query_string, *args, **kwds): + """Parse a GQL query string. + + Args: + query_string (str): Full GQL query, e.g. 'SELECT * FROM Kind WHERE + prop = 1 ORDER BY prop2'. + args: If present, used to call bind(). + kwds: If present, used to call bind(). + + Returns: + Query: a query instance. + + Raises: + google.cloud.ndb.exceptions.BadQueryError: When bad gql is passed in. + """ + # Avoid circular import in Python 2.7 + from google.cloud.ndb import _gql + + query = _gql.GQL(query_string).get_query() + if args or kwds: + query = query.bind(*args, **kwds) + return query + + +def _to_property_names(properties): + # Avoid circular import in Python 2.7 + from google.cloud.ndb import model + + fixed = [] + for prop in properties: + if isinstance(prop, str): + fixed.append(prop) + elif isinstance(prop, model.Property): + fixed.append(prop._name) + else: + raise TypeError( + "Unexpected property {}; " "should be string or Property".format(prop) + ) + return fixed + + +def _check_properties(kind, fixed, **kwargs): + # Avoid circular import in Python 2.7 + from google.cloud.ndb import model + + modelclass = model.Model._kind_map.get(kind) + if modelclass is not None: + modelclass._check_properties(fixed, **kwargs) diff --git a/packages/google-cloud-ndb/google/cloud/ndb/stats.py b/packages/google-cloud-ndb/google/cloud/ndb/stats.py new file mode 100644 index 000000000000..4eda7649ebf2 --- /dev/null +++ b/packages/google-cloud-ndb/google/cloud/ndb/stats.py @@ -0,0 +1,448 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Models for accessing datastore usage statistics. + +These entities cannot be created by users, but are populated in the +application's datastore by offline processes run by the Google Cloud team. 
+""" + +from google.cloud.ndb import model + + +__all__ = [ + "BaseKindStatistic", + "BaseStatistic", + "GlobalStat", + "KindCompositeIndexStat", + "KindNonRootEntityStat", + "KindPropertyNamePropertyTypeStat", + "KindPropertyNameStat", + "KindPropertyTypeStat", + "KindRootEntityStat", + "KindStat", + "NamespaceGlobalStat", + "NamespaceKindCompositeIndexStat", + "NamespaceKindNonRootEntityStat", + "NamespaceKindPropertyNamePropertyTypeStat", + "NamespaceKindPropertyNameStat", + "NamespaceKindPropertyTypeStat", + "NamespaceKindRootEntityStat", + "NamespaceKindStat", + "NamespacePropertyTypeStat", + "NamespaceStat", + "PropertyTypeStat", +] + + +class BaseStatistic(model.Model): + """Base Statistic Model class. + + Attributes: + bytes (int): the total number of bytes taken up in Cloud Datastore for + the statistic instance. + count (int): attribute is the total number of occurrences of the + statistic in Cloud Datastore. + timestamp (datetime.datetime): the time the statistic instance was + written to Cloud Datastore. + """ + + # This is necessary for the _get_kind() classmethod override. + STORED_KIND_NAME = "__BaseStatistic__" + + bytes = model.IntegerProperty() + + count = model.IntegerProperty() + + timestamp = model.DateTimeProperty() + + @classmethod + def _get_kind(cls): + """Kind name override.""" + return cls.STORED_KIND_NAME + + +class BaseKindStatistic(BaseStatistic): + """Base Statistic Model class for stats associated with kinds. + + Attributes: + kind_name (str): the name of the kind associated with the statistic + instance. + entity_bytes (int): the number of bytes taken up to store the statistic + in Cloud Datastore minus the cost of storing indices. + """ + + STORED_KIND_NAME = "__BaseKindStatistic__" + + kind_name = model.StringProperty() + + entity_bytes = model.IntegerProperty(default=0) + + +class GlobalStat(BaseStatistic): + """An aggregate of all entities across the entire application. + + This statistic only has a single instance in Cloud Datastore that contains + the total number of entities stored and the total number of bytes they take + up. + + Attributes: + entity_bytes (int): the number of bytes taken up to store the statistic + in Cloud Datastore minus the cost of storing indices. + builtin_index_bytes (int): the number of bytes taken up to store + built-in index entries. + builtin_index_count (int): the number of built-in index entries. + composite_index_bytes (int): the number of bytes taken up to store + composite index entries. + composite_index_count (int): the number of composite index entries. + """ + + STORED_KIND_NAME = "__Stat_Total__" + + entity_bytes = model.IntegerProperty(default=0) + + builtin_index_bytes = model.IntegerProperty(default=0) + + builtin_index_count = model.IntegerProperty(default=0) + + composite_index_bytes = model.IntegerProperty(default=0) + + composite_index_count = model.IntegerProperty(default=0) + + +class NamespaceStat(BaseStatistic): + """An aggregate of all entities across an entire namespace. + + This statistic has one instance per namespace. The key_name is the + represented namespace. NamespaceStat entities will only be found + in the namespace "" (empty string). It contains the total + number of entities stored and the total number of bytes they take up. + + Attributes: + subject_namespace (str): the namespace associated with the statistic + instance. + entity_bytes (int): the number of bytes taken up to store the statistic + in Cloud Datastore minus the cost of storing indices. 
+        builtin_index_bytes (int): the number of bytes taken up to store
+            built-in index entries.
+        builtin_index_count (int): the number of built-in index entries.
+        composite_index_bytes (int): the number of bytes taken up to store
+            composite index entries.
+        composite_index_count (int): the number of composite index entries.
+    """
+
+    STORED_KIND_NAME = "__Stat_Namespace__"
+
+    subject_namespace = model.StringProperty()
+
+    entity_bytes = model.IntegerProperty(default=0)
+
+    builtin_index_bytes = model.IntegerProperty(default=0)
+
+    builtin_index_count = model.IntegerProperty(default=0)
+
+    composite_index_bytes = model.IntegerProperty(default=0)
+
+    composite_index_count = model.IntegerProperty(default=0)
+
+
+class KindStat(BaseKindStatistic):
+    """An aggregate of all entities at the granularity of their Kind.
+
+    There is an instance of the KindStat for every Kind that is in the
+    application's datastore. This stat contains per-Kind statistics.
+
+    Attributes:
+        builtin_index_bytes (int): the number of bytes taken up to store
+            built-in index entries.
+        builtin_index_count (int): the number of built-in index entries.
+        composite_index_bytes (int): the number of bytes taken up to store
+            composite index entries.
+        composite_index_count (int): the number of composite index entries.
+    """
+
+    STORED_KIND_NAME = "__Stat_Kind__"
+
+    builtin_index_bytes = model.IntegerProperty(default=0)
+
+    builtin_index_count = model.IntegerProperty(default=0)
+
+    composite_index_bytes = model.IntegerProperty(default=0)
+
+    composite_index_count = model.IntegerProperty(default=0)
+
+
+class KindRootEntityStat(BaseKindStatistic):
+    """Statistics of the number of root entities in Cloud Datastore by Kind.
+
+    There is an instance of the KindRootEntityStat for every Kind that is in
+    the application's datastore and has an instance that is a root entity.
+    This stat contains statistics regarding these root entity instances.
+    """
+
+    STORED_KIND_NAME = "__Stat_Kind_IsRootEntity__"
+
+
+class KindNonRootEntityStat(BaseKindStatistic):
+    """Statistics of the number of non root entities in Cloud Datastore by Kind.
+
+    There is an instance of the KindNonRootEntityStat for every Kind that is
+    in the application's datastore that is not a root entity. This stat
+    contains statistics regarding these non root entity instances.
+    """
+
+    STORED_KIND_NAME = "__Stat_Kind_NotRootEntity__"
+
+
+class PropertyTypeStat(BaseStatistic):
+    """An aggregate of all properties across the entire application by type.
+
+    There is an instance of the PropertyTypeStat for every property type
+    (google.appengine.api.datastore_types._PROPERTY_TYPES) in use by the
+    application in its datastore.
+
+    Attributes:
+        property_type (str): the property type associated with the statistic
+            instance.
+        entity_bytes (int): the number of bytes taken up to store the
+            statistic in Cloud Datastore minus the cost of storing indices.
+        builtin_index_bytes (int): the number of bytes taken up to store
+            built-in index entries.
+        builtin_index_count (int): the number of built-in index entries.
+    """
+
+    STORED_KIND_NAME = "__Stat_PropertyType__"
+
+    property_type = model.StringProperty()
+
+    entity_bytes = model.IntegerProperty(default=0)
+
+    builtin_index_bytes = model.IntegerProperty(default=0)
+
+    builtin_index_count = model.IntegerProperty(default=0)
+
+
+class KindPropertyTypeStat(BaseKindStatistic):
+    """Statistics on (kind, property_type) tuples in the app's datastore.
+
+    There is an instance of the KindPropertyTypeStat for every
+    (kind, property_type) tuple in the application's datastore.
+
+    Attributes:
+        property_type (str): the property type associated with the statistic
+            instance.
+        builtin_index_bytes (int): the number of bytes taken up to store
+            built-in index entries.
+        builtin_index_count (int): the number of built-in index entries.
+    """
+
+    STORED_KIND_NAME = "__Stat_PropertyType_Kind__"
+
+    property_type = model.StringProperty()
+
+    builtin_index_bytes = model.IntegerProperty(default=0)
+
+    builtin_index_count = model.IntegerProperty(default=0)
+
+
+class KindPropertyNameStat(BaseKindStatistic):
+    """Statistics on (kind, property_name) tuples in the app's datastore.
+
+    There is an instance of the KindPropertyNameStat for every
+    (kind, property_name) tuple in the application's datastore.
+
+    Attributes:
+        property_name (str): the name of the property associated with the
+            statistic instance.
+        builtin_index_bytes (int): the number of bytes taken up to store
+            built-in index entries.
+        builtin_index_count (int): the number of built-in index entries.
+    """
+
+    STORED_KIND_NAME = "__Stat_PropertyName_Kind__"
+
+    property_name = model.StringProperty()
+
+    builtin_index_bytes = model.IntegerProperty(default=0)
+
+    builtin_index_count = model.IntegerProperty(default=0)
+
+
+class KindPropertyNamePropertyTypeStat(BaseKindStatistic):
+    """Statistic on (kind, property_name, property_type) tuples in Cloud
+    Datastore.
+
+    There is an instance of the KindPropertyNamePropertyTypeStat for every
+    (kind, property_name, property_type) tuple in the application's datastore.
+
+    Attributes:
+        property_type (str): the property type associated with the statistic
+            instance.
+        property_name (str): the name of the property associated with the
+            statistic instance.
+        builtin_index_bytes (int): the number of bytes taken up to store
+            built-in index entries.
+        builtin_index_count (int): the number of built-in index entries.
+    """
+
+    STORED_KIND_NAME = "__Stat_PropertyType_PropertyName_Kind__"
+
+    property_type = model.StringProperty()
+
+    property_name = model.StringProperty()
+
+    builtin_index_bytes = model.IntegerProperty(default=0)
+
+    builtin_index_count = model.IntegerProperty(default=0)
+
+
+class KindCompositeIndexStat(BaseStatistic):
+    """Statistic on (kind, composite_index_id) tuples in Cloud Datastore.
+
+    There is an instance of the KindCompositeIndexStat for every unique
+    (kind, composite_index_id) tuple in the application's datastore indexes.
+
+    Attributes:
+        index_id (int): the id of the composite index associated with the
+            statistic instance.
+        kind_name (str): the name of the kind associated with the statistic
+            instance.
+    """
+
+    STORED_KIND_NAME = "__Stat_Kind_CompositeIndex__"
+
+    index_id = model.IntegerProperty()
+
+    kind_name = model.StringProperty()
+
+
+# The following specify namespace-specific stats.
+# These types are specific to the Cloud Datastore namespace they are located
+# within. These will only be produced if datastore entities exist
+# in a namespace other than the empty namespace (i.e. namespace="").
+
+
+class NamespaceGlobalStat(GlobalStat):
+    """GlobalStat equivalent for a specific namespace.
+
+    These may be found in each specific namespace and represent stats for that
+    particular namespace.
+    """
+
+    STORED_KIND_NAME = "__Stat_Ns_Total__"
+
+
+class NamespaceKindStat(KindStat):
+    """KindStat equivalent for a specific namespace.
+ + These may be found in each specific namespace and represent stats for that + particular namespace. + """ + + STORED_KIND_NAME = "__Stat_Ns_Kind__" + + +class NamespaceKindRootEntityStat(KindRootEntityStat): + """KindRootEntityStat equivalent for a specific namespace. + + These may be found in each specific namespace and represent stats for that + particular namespace. + """ + + STORED_KIND_NAME = "__Stat_Ns_Kind_IsRootEntity__" + + +class NamespaceKindNonRootEntityStat(KindNonRootEntityStat): + """KindNonRootEntityStat equivalent for a specific namespace. + + These may be found in each specific namespace and represent stats for that + particular namespace. + """ + + STORED_KIND_NAME = "__Stat_Ns_Kind_NotRootEntity__" + + +class NamespacePropertyTypeStat(PropertyTypeStat): + """PropertyTypeStat equivalent for a specific namespace. + + These may be found in each specific namespace and represent stats for that + particular namespace. + """ + + STORED_KIND_NAME = "__Stat_Ns_PropertyType__" + + +class NamespaceKindPropertyTypeStat(KindPropertyTypeStat): + """KindPropertyTypeStat equivalent for a specific namespace. + + These may be found in each specific namespace and represent stats for that + particular namespace. + """ + + STORED_KIND_NAME = "__Stat_Ns_PropertyType_Kind__" + + +class NamespaceKindPropertyNameStat(KindPropertyNameStat): + """KindPropertyNameStat equivalent for a specific namespace. + + These may be found in each specific namespace and represent stats for that + particular namespace. + """ + + STORED_KIND_NAME = "__Stat_Ns_PropertyName_Kind__" + + +class NamespaceKindPropertyNamePropertyTypeStat(KindPropertyNamePropertyTypeStat): + """KindPropertyNamePropertyTypeStat equivalent for a specific namespace. + + These may be found in each specific namespace and represent stats for that + particular namespace. + """ + + STORED_KIND_NAME = "__Stat_Ns_PropertyType_PropertyName_Kind__" + + +class NamespaceKindCompositeIndexStat(KindCompositeIndexStat): + """KindCompositeIndexStat equivalent for a specific namespace. + + These may be found in each specific namespace and represent stats for that + particular namespace. + """ + + STORED_KIND_NAME = "__Stat_Ns_Kind_CompositeIndex__" + + +# Maps a datastore stat entity kind name to its respective model class. +# NOTE: Any new stats added to this module should also be added here. 
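+# For example, given the raw kind name of a stat entity, the matching model
+# class can be looked up in this mapping (illustrative):
+#     _DATASTORE_STATS_CLASSES_BY_KIND["__Stat_Kind__"] is KindStat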
+_DATASTORE_STATS_CLASSES_BY_KIND = {
+ GlobalStat.STORED_KIND_NAME: GlobalStat,
+ NamespaceStat.STORED_KIND_NAME: NamespaceStat,
+ KindStat.STORED_KIND_NAME: KindStat,
+ KindRootEntityStat.STORED_KIND_NAME: KindRootEntityStat,
+ KindNonRootEntityStat.STORED_KIND_NAME: KindNonRootEntityStat,
+ PropertyTypeStat.STORED_KIND_NAME: PropertyTypeStat,
+ KindPropertyTypeStat.STORED_KIND_NAME: KindPropertyTypeStat,
+ KindPropertyNameStat.STORED_KIND_NAME: KindPropertyNameStat,
+ KindPropertyNamePropertyTypeStat.STORED_KIND_NAME: KindPropertyNamePropertyTypeStat, # noqa: E501
+ KindCompositeIndexStat.STORED_KIND_NAME: KindCompositeIndexStat,
+ NamespaceGlobalStat.STORED_KIND_NAME: NamespaceGlobalStat,
+ NamespaceKindStat.STORED_KIND_NAME: NamespaceKindStat,
+ NamespaceKindRootEntityStat.STORED_KIND_NAME: NamespaceKindRootEntityStat,
+ NamespaceKindNonRootEntityStat.STORED_KIND_NAME: NamespaceKindNonRootEntityStat, # noqa: E501
+ NamespacePropertyTypeStat.STORED_KIND_NAME: NamespacePropertyTypeStat,
+ NamespaceKindPropertyTypeStat.STORED_KIND_NAME: NamespaceKindPropertyTypeStat, # noqa: E501
+ NamespaceKindPropertyNameStat.STORED_KIND_NAME: NamespaceKindPropertyNameStat, # noqa: E501
+ NamespaceKindPropertyNamePropertyTypeStat.STORED_KIND_NAME: NamespaceKindPropertyNamePropertyTypeStat, # noqa: E501
+ NamespaceKindCompositeIndexStat.STORED_KIND_NAME: NamespaceKindCompositeIndexStat, # noqa: E501
+}
diff --git a/packages/google-cloud-ndb/google/cloud/ndb/tasklets.py b/packages/google-cloud-ndb/google/cloud/ndb/tasklets.py
new file mode 100644
index 000000000000..960c48d34b95
--- /dev/null
+++ b/packages/google-cloud-ndb/google/cloud/ndb/tasklets.py
@@ -0,0 +1,668 @@
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Provides a tasklet decorator and related helpers.
+
+Tasklets are a way to write concurrently running functions without threads.
+Tasklets are executed by an event loop and can suspend themselves blocking for
+I/O or some other operation using a yield statement. The notion of a blocking
+operation is abstracted into the Future class, but a tasklet may also yield an
+RPC in order to wait for that RPC to complete.
+
+The @tasklet decorator wraps a generator function so that when it is called, a
+Future is returned while the generator is executed by the event loop. Within
+the tasklet, any yield of a Future waits for and returns the Future's result.
+For example::
+
+ from google.cloud.ndb.tasklets import tasklet
+
+ @tasklet
+ def foo():
+ a = yield AFuture
+ b = yield BFuture
+ return a + b
+
+ def main():
+ f = foo()
+ x = f.result()
+ print(x)
+
+In this example, `foo` needs the results of two futures, `AFuture` and
+`BFuture`, which it gets somehow, for example as results of calls.
+Rather than waiting for their values and blocking, it yields. First,
+the tasklet yields `AFuture`. The event loop gets `AFuture` and takes
+care of waiting for its result.
When the event loop gets the result +of `AFuture`, it sends it to the tasklet by calling `send` on the +iterator returned by calling the tasklet. The tasklet assigns the +value sent to `a` and then yields `BFuture`. Again the event loop +waits for the result of `BFuture` and sends it to the tasklet. The +tasklet then has what it needs to compute a result. + +The tasklet simply returns its result. (Behind the scenes, when you +return a value from a generator in Python 3, a `StopIteration` +exception is raised with the return value as its argument. The event +loop catches the exception and uses the exception argument as the +result of the tasklet.) + +Note that blocking until the Future's result is available using result() is +somewhat inefficient (though not vastly -- it is not busy-waiting). In most +cases such code should be rewritten as a tasklet instead:: + + @tasklet + def main_tasklet(): + f = foo() + x = yield f + print(x) + +Calling a tasklet automatically schedules it with the event loop:: + + def main(): + f = main_tasklet() + eventloop.run() # Run until no tasklets left to do + f.done() # Returns True +""" +import functools +import types + +from google.cloud.ndb import _eventloop +from google.cloud.ndb import exceptions +from google.cloud.ndb import _remote + +__all__ = [ + "add_flow_exception", + "Future", + "make_context", + "make_default_context", + "QueueFuture", + "ReducingFuture", + "Return", + "SerialQueueFuture", + "set_context", + "sleep", + "synctasklet", + "tasklet", + "toplevel", + "wait_all", + "wait_any", +] + + +class Future(object): + """Represents a task to be completed at an unspecified time in the future. + + This is the abstract base class from which all NDB ``Future`` classes are + derived. A future represents a task that is to be performed + asynchronously with the current flow of program control. + + Provides interface defined by :class:`concurrent.futures.Future` as well as + that of the legacy Google App Engine NDB ``Future`` class. + """ + + def __init__(self, info="Unknown"): + self.info = info + self._done = False + self._result = None + self._callbacks = [] + self._exception = None + + def __repr__(self): + return "{}({!r}) <{}>".format(type(self).__name__, self.info, id(self)) + + def done(self): + """Get whether future has finished its task. + + Returns: + bool: True if task has finished, False otherwise. + """ + return self._done + + def running(self): + """Get whether future's task is still running. + + Returns: + bool: False if task has finished, True otherwise. + """ + return not self._done + + def wait(self): + """Wait for this future's task to complete. + + This future will be done and will have either a result or an exception + after a call to this method. + """ + while not self._done: + if not _eventloop.run1(): + raise RuntimeError("Eventloop is exhausted with unfinished futures.") + + def check_success(self): + """Check whether a future has completed without raising an exception. + + This will wait for the future to finish its task and will then raise + the future's exception, if there is one, or else do nothing. + """ + self.wait() + + if self._exception: + raise self._exception + + def set_result(self, result): + """Set the result for this future. + + Signals that this future has completed its task and sets the result. + + Should not be called from user code. 
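+
+ Args:
+ result (Any): The result to store on this future.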
+ """ + if self._done: + raise RuntimeError("Cannot set result on future that is done.") + + self._result = result + self._finish() + + def set_exception(self, exception): + """Set an exception for this future. + + Signals that this future's task has resulted in an exception. The + future is considered done but has no result. Once the exception is set, + calls to :meth:`done` will return True, and calls to :meth:`result` + will raise the exception. + + Should not be called from user code. + + Args: + exception (Exception): The exception that was raised. + """ + if self._done: + raise RuntimeError("Cannot set exception on future that is done.") + + self._exception = exception + self._finish() + + def _finish(self): + """Wrap up future upon completion. + + Sets `_done` to True and calls any registered callbacks. + """ + self._done = True + + for callback in self._callbacks: + callback(self) + + def result(self): + """Return the result of this future's task. + + If the task is finished, this will return immediately. Otherwise, this + will block until a result is ready. + + Returns: + Any: The result + """ + self.check_success() + return self._result + + get_result = result # Legacy NDB interface + + def exception(self): + """Get the exception for this future, if there is one. + + If the task has not yet finished, this will block until the task has + finished. When the task has finished, this will get the exception + raised during the task, or None, if no exception was raised. + + Returns: + Union[Exception, None]: The exception, or None. + """ + return self._exception + + get_exception = exception # Legacy NDB interface + + def get_traceback(self): + """Get the traceback for this future, if there is one. + + Included for backwards compatibility with legacy NDB. If there is an + exception for this future, this just returns the ``__traceback__`` + attribute of that exception. + + Returns: + Union[types.TracebackType, None]: The traceback, or None. + """ + if self._exception: + return self._exception.__traceback__ + + def add_done_callback(self, callback): + """Add a callback function to be run upon task completion. Will run + immediately if task has already finished. + + Args: + callback (Callable): The function to execute. + """ + if self._done: + callback(self) + else: + self._callbacks.append(callback) + + def cancel(self): + """Attempt to cancel the task for this future. + + If the task has already completed, this call will do nothing. + Otherwise, this will attempt to cancel whatever task this future is + waiting on. There is no specific guarantee the underlying task will be + cancelled. + """ + if not self.done(): + self.set_exception(exceptions.Cancelled()) + + def cancelled(self): + """Get whether the task for this future has been cancelled. + + Returns: + :data:`True`: If this future's task has been cancelled, otherwise + :data:`False`. + """ + return self._exception is not None and isinstance( + self._exception, exceptions.Cancelled + ) + + @staticmethod + def wait_any(futures): + """Calls :func:`wait_any`.""" + # For backwards compatibility + return wait_any(futures) + + @staticmethod + def wait_all(futures): + """Calls :func:`wait_all`.""" + # For backwards compatibility + return wait_all(futures) + + +class _TaskletFuture(Future): + """A future which waits on a tasklet. + + A future of this type wraps a generator derived from calling a tasklet. A + tasklet's generator is expected to yield future objects, either an instance + of :class:`Future` or :class:`_remote.RemoteCall`. 
The result of each + yielded future is then sent back into the generator until the generator has + completed and either returned a value or raised an exception. + + Args: + typing.Generator[Union[tasklets.Future, _remote.RemoteCall], Any, Any]: + The generator. + """ + + def __init__(self, generator, context, info="Unknown"): + super(_TaskletFuture, self).__init__(info=info) + self.generator = generator + self.context = context + self.waiting_on = None + + def _advance_tasklet(self, send_value=None, error=None): + """Advance a tasklet one step by sending in a value or error.""" + # Avoid Python 2.7 import error + from google.cloud.ndb import context as context_module + + try: + with self.context.use(): + # Send the next value or exception into the generator + if error: + traceback = error.__traceback__ + yielded = self.generator.throw(type(error), error, traceback) + + else: + # send_value will be None if this is the first time + yielded = self.generator.send(send_value) + + # Context may have changed in tasklet + self.context = context_module.get_context() + + except StopIteration as stop: + # Generator has signalled exit, get the return value. This tasklet + # has finished. + self.set_result(_get_return_value(stop)) + return + + except Return as stop: + # Tasklet has raised Return to return a result. This tasklet has + # finished. + self.set_result(_get_return_value(stop)) + return + + except Exception as error: + # An error has occurred in the tasklet. This tasklet has finished. + self.set_exception(error) + return + + # This tasklet has yielded a value. We expect this to be a future + # object (either NDB or gRPC) or a sequence of futures, in the case of + # parallel yield. + + def done_callback(yielded): + # To be called when a future dependency has completed. Advance the + # tasklet with the yielded value or error. + # + # It was tempting to call `_advance_tasklet` (`_help_tasklet_along` + # in Legacy) directly. Doing so, it has been found, can lead to + # exceeding the maximum recursion depth. Queuing it up to run on + # the event loop avoids this issue by keeping the call stack + # shallow. + self.waiting_on = None + + error = yielded.exception() + if error: + self.context.eventloop.call_soon(self._advance_tasklet, error=error) + else: + self.context.eventloop.call_soon( + self._advance_tasklet, yielded.result() + ) + + if isinstance(yielded, Future): + yielded.add_done_callback(done_callback) + self.waiting_on = yielded + + elif isinstance(yielded, _remote.RemoteCall): + self.context.eventloop.queue_rpc(yielded, done_callback) + self.waiting_on = yielded + + elif isinstance(yielded, (list, tuple)): + future = _MultiFuture(yielded) + future.add_done_callback(done_callback) + self.waiting_on = future + + else: + raise RuntimeError( + "A tasklet yielded an illegal value: {!r}".format(yielded) + ) + + def cancel(self): + """Overrides :meth:`Future.cancel`.""" + if self.waiting_on: + self.waiting_on.cancel() + + else: + super(_TaskletFuture, self).cancel() + + +def _get_return_value(stop): + """Inspect `StopIteration` instance for return value of tasklet. + + Args: + stop (StopIteration): The `StopIteration` exception for the finished + tasklet. + """ + if len(stop.args) == 1: + return stop.args[0] + + elif stop.args: + return stop.args + + +class _MultiFuture(Future): + """A future which depends on multiple other futures. + + This future will be done when either all dependencies have results or when + one dependency has raised an exception. 
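+
+ For example, a parallel yield of several futures inside a tasklet is
+ handled with this class (illustrative)::
+
+ thing1, thing2 = yield future1, future2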
+ + Args: + dependencies (typing.Sequence[tasklets.Future]): A sequence of the + futures this future depends on. + """ + + def __init__(self, dependencies): + super(_MultiFuture, self).__init__() + futures = [] + for dependency in dependencies: + if isinstance(dependency, (list, tuple)): + dependency = _MultiFuture(dependency) + futures.append(dependency) + + self._dependencies = futures + + for dependency in futures: + dependency.add_done_callback(self._dependency_done) + + if not dependencies: + self.set_result(()) + + def __repr__(self): + return "{}({}) <{}>".format( + type(self).__name__, + ", ".join(map(repr, self._dependencies)), + id(self), + ) + + def _dependency_done(self, dependency): + if self._done: + return + + error = dependency.exception() + if error is not None: + self.set_exception(error) + return + + all_done = all((future.done() for future in self._dependencies)) + if all_done: + result = tuple((future.result() for future in self._dependencies)) + self.set_result(result) + + def cancel(self): + """Overrides :meth:`Future.cancel`.""" + for dependency in self._dependencies: + dependency.cancel() + + +def tasklet(wrapped): + """ + A decorator to turn a function or method into a tasklet. + + Calling a tasklet will return a :class:`~Future` instance which can be used + to get the eventual return value of the tasklet. + + For more information on tasklets and cooperative multitasking, see the main + documentation. + + Args: + wrapped (Callable): The wrapped function. + """ + + @functools.wraps(wrapped) + def tasklet_wrapper(*args, **kwargs): + # Avoid Python 2.7 circular import + from google.cloud.ndb import context as context_module + + # The normal case is that the wrapped function is a generator function + # that returns a generator when called. We also support the case that + # the user has wrapped a regular function with the tasklet decorator. + # In this case, we fail to realize an actual tasklet, but we go ahead + # and create a future object and set the result to the function's + # return value so that from the user perspective there is no problem. + # This permissive behavior is inherited from legacy NDB. + context = context_module.get_context() + + try: + returned = wrapped(*args, **kwargs) + except Return as stop: + # If wrapped is a regular function and the function uses "raise + # Return(result)" pattern rather than just returning the result, + # then we'll extract the result from the StopIteration exception. + returned = _get_return_value(stop) + + if isinstance(returned, types.GeneratorType): + # We have a tasklet, start it + future = _TaskletFuture(returned, context, info=wrapped.__name__) + future._advance_tasklet() + + else: + # We don't have a tasklet, but we fake it anyway + future = Future(info=wrapped.__name__) + future.set_result(returned) + + return future + + return tasklet_wrapper + + +def wait_any(futures): + """Wait for any of several futures to finish. + + Args: + futures (typing.Sequence[Future]): The futures to wait on. + + Returns: + Future: The first future to be found to have finished. + """ + if not futures: + return None + + while True: + for future in futures: + if future.done(): + return future + + if not _eventloop.run1(): + raise RuntimeError("Eventloop is exhausted with unfinished futures.") + + +def wait_all(futures): + """Wait for all of several futures to finish. + + Args: + futures (typing.Sequence[Future]): The futures to wait on. 
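+
+ Example (illustrative; assumes ``futures`` is a sequence of futures)::
+
+ wait_all(futures)
+ results = [future.result() for future in futures] # does not block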
+ """ + if not futures: + return + + for future in futures: + future.wait() + + +class Return(Exception): + """Return from a tasklet in Python 2. + + In Python 2, generators may not return a value. In order to return a value + from a tasklet, then, it is necessary to raise an instance of this + exception with the return value:: + + from google.cloud import ndb + + @ndb.tasklet + def get_some_stuff(): + future1 = get_something_async() + future2 = get_something_else_async() + thing1, thing2 = yield future1, future2 + result = compute_result(thing1, thing2) + raise ndb.Return(result) + + In Python 3, you can simply return the result:: + + @ndb.tasklet + def get_some_stuff(): + future1 = get_something_async() + future2 = get_something_else_async() + thing1, thing2 = yield future1, future2 + result = compute_result(thing1, thing2) + return result + + Note that Python 2 is no longer supported by the newest versions of Cloud NDB. + """ + + +def sleep(seconds): + """Sleep some amount of time in a tasklet. + Example: + ..code-block:: python + yield tasklets.sleep(0.5) # Sleep for half a second. + Arguments: + seconds (float): Amount of time, in seconds, to sleep. + Returns: + Future: Future will be complete after ``seconds`` have elapsed. + """ + future = Future(info="sleep({})".format(seconds)) + _eventloop.queue_call(seconds, future.set_result, None) + return future + + +def add_flow_exception(*args, **kwargs): + raise NotImplementedError + + +def make_context(*args, **kwargs): + raise NotImplementedError + + +def make_default_context(*args, **kwargs): + raise NotImplementedError + + +class QueueFuture(object): + def __init__(self, *args, **kwargs): + raise NotImplementedError + + +class ReducingFuture(object): + def __init__(self, *args, **kwargs): + raise NotImplementedError + + +class SerialQueueFuture(object): + def __init__(self, *args, **kwargs): + raise NotImplementedError + + +def set_context(*args, **kwargs): + raise NotImplementedError + + +def synctasklet(wrapped): + """A decorator to run a tasklet as a function when called. + + Use this to wrap a request handler function that will be called by some + web application framework (e.g. a Django view function or a + webapp.RequestHandler.get method). + + Args: + wrapped (Callable): The wrapped function. + """ + taskletfunc = tasklet(wrapped) + + @functools.wraps(wrapped) + def synctasklet_wrapper(*args, **kwargs): + return taskletfunc(*args, **kwargs).result() + + return synctasklet_wrapper + + +def toplevel(wrapped): + """A synctasklet decorator that flushes any pending work. + + Use of this decorator is largely unnecessary, as you should be using + :meth:`~google.cloud.ndb.client.Client.context` which also flushes pending + work when exiting the context. + + Args: + wrapped (Callable): The wrapped function." 
+ """ + # Avoid Python 2.7 circular import + from google.cloud.ndb import context as context_module + + synctasklet_wrapped = synctasklet(wrapped) + + @functools.wraps(wrapped) + def toplevel_wrapper(*args, **kwargs): + context = context_module.get_context() + try: + with context.new().use(): + return synctasklet_wrapped(*args, **kwargs) + finally: + _eventloop.run() + + return toplevel_wrapper diff --git a/packages/google-cloud-ndb/google/cloud/ndb/utils.py b/packages/google-cloud-ndb/google/cloud/ndb/utils.py new file mode 100644 index 000000000000..a424532044c2 --- /dev/null +++ b/packages/google-cloud-ndb/google/cloud/ndb/utils.py @@ -0,0 +1,166 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Low-level utilities used internally by ``ndb``""" + + +import functools +import inspect +import os +import threading + +_getfullargspec = inspect.getfullargspec + +TRUTHY_STRINGS = {"t", "true", "y", "yes", "on", "1"} + + +def asbool(value): + """Convert an arbitrary value to a boolean. + Usually, `value`, will be a string. If `value` is already a boolean, it's + just returned as-is. + + Returns: + bool: `value` if `value` is a bool, `False` if `value` is `None`, + otherwise `True` if `value` converts to a lowercase string that is + "truthy" or `False` if it does not. + """ + if value is None: + return False + + if isinstance(value, bool): + return value + + value = str(value).strip() + return value.lower() in TRUTHY_STRINGS + + +DEBUG = asbool(os.environ.get("NDB_DEBUG", False)) + + +def code_info(*args, **kwargs): + raise NotImplementedError + + +def decorator(*args, **kwargs): + raise NotImplementedError + + +def frame_info(*args, **kwargs): + raise NotImplementedError + + +def func_info(*args, **kwargs): + raise NotImplementedError + + +def gen_info(*args, **kwargs): + raise NotImplementedError + + +def get_stack(*args, **kwargs): + raise NotImplementedError + + +def logging_debug(log, message, *args, **kwargs): + """Conditionally write to the debug log. + + In some Google App Engine environments, writing to the debug log is a + significant performance hit. If the environment variable `NDB_DEBUG` is set + to a "truthy" value, this function will call `log.debug(message, *args, + **kwargs)`, otherwise this is a no-op. + """ + if DEBUG: + message = str(message) + if args or kwargs: + message = message.format(*args, **kwargs) + + from google.cloud.ndb import context as context_module + + context = context_module.get_context(False) + if context: + message = "{}: {}".format(context.id, message) + + log.debug(message) + + +class keyword_only(object): + """A decorator to get some of the functionality of keyword-only arguments + from Python 3. It takes allowed keyword args and default values as + parameters. Raises TypeError if a keyword argument not included in those + parameters is passed in. 
+ """ + + def __init__(self, **kwargs): + self.defaults = kwargs + + def __call__(self, wrapped): + @functools.wraps(wrapped) + def wrapper(*args, **kwargs): + new_kwargs = self.defaults.copy() + for kwarg in kwargs: + if kwarg not in new_kwargs: + raise TypeError( + "%s() got an unexpected keyword argument '%s'" + % (wrapped.__name__, kwarg) + ) + new_kwargs.update(kwargs) + return wrapped(*args, **new_kwargs) + + return wrapper + + +def positional(max_pos_args): + """A decorator to declare that only the first N arguments may be + positional. Note that for methods, n includes 'self'. This decorator + retains TypeError functionality from previous version, but adds two + attributes that can be used in combination with other decorators that + depend on inspect.signature, only available in Python 3. Note that this + decorator has to be closer to the function definition than other decorators + that need to access `_positional_names` or `_positional_args`. + """ + + def positional_decorator(wrapped): + root = getattr(wrapped, "_wrapped", wrapped) + wrapped._positional_args = max_pos_args + argspec = _getfullargspec(root) + wrapped._argspec = argspec + wrapped._positional_names = argspec.args[:max_pos_args] + + @functools.wraps(wrapped) + def positional_wrapper(*args, **kwds): + if len(args) > max_pos_args: + plural_s = "" + if max_pos_args != 1: + plural_s = "s" + raise TypeError( + "%s() takes at most %d positional argument%s (%d given)" + % (wrapped.__name__, max_pos_args, plural_s, len(args)) + ) + return wrapped(*args, **kwds) + + return positional_wrapper + + return positional_decorator + + +threading_local = threading.local + + +def tweak_logging(*args, **kwargs): + raise NotImplementedError + + +def wrapping(*args, **kwargs): + """Use functools.wraps instead""" + raise NotImplementedError diff --git a/packages/google-cloud-ndb/google/cloud/ndb/version.py b/packages/google-cloud-ndb/google/cloud/ndb/version.py new file mode 100644 index 000000000000..c6a8c90b9248 --- /dev/null +++ b/packages/google-cloud-ndb/google/cloud/ndb/version.py @@ -0,0 +1,15 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "2.3.4" diff --git a/packages/google-cloud-ndb/noxfile.py b/packages/google-cloud-ndb/noxfile.py new file mode 100644 index 000000000000..c8cd3321759a --- /dev/null +++ b/packages/google-cloud-ndb/noxfile.py @@ -0,0 +1,394 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Build and test configuration file. 
+ +Assumes ``nox >= 2018.9.14`` is installed. +""" + +import os +import pathlib +import re +import shutil +import signal +import subprocess + +import nox + +LOCAL_DEPS = ("google-api-core", "google-cloud-core") +NOX_DIR = os.path.abspath(os.path.dirname(__file__)) +DEFAULT_INTERPRETER = "3.14" +ALL_INTERPRETERS = ("3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13", "3.14") +EMULTATOR_INTERPRETERS = ("3.9", "3.10", "3.11", "3.12", "3.13", "3.14") +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +BLACK_VERSION = "black[jupyter]==23.7.0" +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "google-cloud-testutils", + "google-cloud-core", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + +nox.options.sessions = [ + "prerelease_deps", + "unit-3.9", + "unit-3.10", + "unit-3.11", + "unit-3.12", + "unit-3.13", + "unit-3.14", + "cover", + "old-emulator-system", + "emulator-system", + "lint", + "blacken", + "docs", + "doctest", + "system", +] + + +def get_path(*names): + return os.path.join(NOX_DIR, *names) + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + session.install(*standard_deps, *constraints) + session.install("-e", ".", *constraints) + + +def default(session): + # Install all test dependencies, then install this package in-place. + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=DEFAULT_INTERPRETER) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): + """Run all tests with prerelease versions of dependencies installed.""" + + if protobuf_implementation == "cpp" and session.python in ( + "3.11", + "3.12", + "3.13", + "3.14", + ): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + session.install(*unit_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_INTERPRETERS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. 
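+ # For example, a constraint line like "protobuf==3.20.2" yields just the
+ # package name, "protobuf".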
+ constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "grpc-google-iam-v1", + "google-cloud-datastore", + "googleapis-common-protos", + "grpcio", + "grpcio-status", + "google-api-core", + "google-auth", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils" + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +@nox.session(python=ALL_INTERPRETERS) +def unit(session): + """Run the unit test suite.""" + default(session) + + +@nox.session(py=DEFAULT_INTERPRETER) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(name="old-emulator-system", python=EMULTATOR_INTERPRETERS) +def old_emulator_system(session): + emulator_args = ["gcloud", "beta", "emulators", "datastore", "start"] + _run_emulator(session, emulator_args) + + +@nox.session(name="emulator-system", python=EMULTATOR_INTERPRETERS) +def emulator_system(session): + emulator_args = [ + "gcloud", + "emulators", + "firestore", + "start", + "--database-mode=datastore-mode", + ] + _run_emulator(session, emulator_args) + + +def _run_emulator(session, emulator_args): + """Run the system test suite.""" + # Only run the emulator tests manually. + if not session.interactive: + return + + # TODO: It would be better to allow the emulator to bind to any port and pull + # the port from stderr. + emulator_args.append("--host-port=localhost:8092") + emulator = subprocess.Popen(emulator_args, stderr=subprocess.PIPE) + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_folder_path = os.path.join("tests", "system") + + # Install all test dependencies, then install this package into the + # virtualenv's dist-packages. + session.install("pytest") + session.install("google-cloud-testutils") + for local_dep in LOCAL_DEPS: + session.install(local_dep) + session.install(".", "-c", constraints_path) + + # Run py.test against the system tests. 
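+ # DATASTORE_EMULATOR_HOST redirects the datastore client to the emulator
+ # started above instead of production Datastore.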
+ session.run( + "py.test", + "--quiet", + system_test_folder_path, + *session.posargs, + env={"DATASTORE_EMULATOR_HOST": "localhost:8092"}, + ) + session.run("curl", "-d", "", "localhost:8092/shutdown", external=True) + emulator.terminate() + emulator.wait(timeout=2) + + +def run_black(session, use_check=False): + args = ["black"] + if use_check: + args.append("--check") + + args.extend( + [ + get_path("docs"), + get_path("noxfile.py"), + get_path("google"), + get_path("tests"), + ] + ) + + session.run(*args) + + +@nox.session(py=DEFAULT_INTERPRETER) +def lint(session): + """Run linters. + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + run_black(session, use_check=True) + session.run("flake8", "google", "tests") + + +@nox.session(py=DEFAULT_INTERPRETER) +def blacken(session): + # Install all dependencies. + session.install(BLACK_VERSION) + # Run ``black``. + run_black(session) + + +@nox.session(py="3.10") +def docs(session): + """Build the docs for this library.""" + + session.install(".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(py="3.9") +def doctest(session): + # Install all dependencies. + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.0.1", + ) + session.install(".") + # Run the script for building docs and running doctest. + run_args = [ + "sphinx-build", + "-W", + "-b", + "doctest", + "-d", + get_path("docs", "_build", "doctrees"), + get_path("docs"), + get_path("docs", "_build", "doctest"), + ] + session.run(*run_args) + + +# Run the system tests +@nox.session(py=DEFAULT_INTERPRETER) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = get_path("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Sanity check: Only run tests if the environment variable is set. + if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): + session.skip("Credentials must be set via environment variable") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. 
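+ # Either a single tests/system.py module, a tests/system/ directory, or
+ # both may be present; run whatever is found.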
+ if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163. + session.install("--pre", "grpcio!=1.52.0rc1") + + # Install all test dependencies, then install this package into the + # virtualenv's dist-packages. + session.install("pytest") + session.install("google-cloud-testutils") + for local_dep in LOCAL_DEPS: + session.install(local_dep) + session.install(".", "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run("py.test", "--quiet", system_test_path, *session.posargs) + if system_test_folder_exists: + session.run("py.test", "--quiet", system_test_folder_path, *session.posargs) diff --git a/packages/google-cloud-ndb/renovate.json b/packages/google-cloud-ndb/renovate.json new file mode 100644 index 000000000000..c7875c469bd5 --- /dev/null +++ b/packages/google-cloud-ndb/renovate.json @@ -0,0 +1,12 @@ +{ + "extends": [ + "config:base", + "group:all", + ":preserveSemverRanges", + ":disableDependencyDashboard" + ], + "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py", ".github/workflows/unittest.yml"], + "pip_requirements": { + "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] + } +} diff --git a/packages/google-cloud-ndb/setup.py b/packages/google-cloud-ndb/setup.py new file mode 100644 index 000000000000..a2e5a8572fd3 --- /dev/null +++ b/packages/google-cloud-ndb/setup.py @@ -0,0 +1,93 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
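+
+# Note: the distribution version below is read from
+# google/cloud/ndb/version.py by matching a quoted X.Y.Z version string.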
+
+import io
+import os
+import re
+
+import setuptools
+
+
+PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__))
+
+version = None
+
+with open(os.path.join(PACKAGE_ROOT, "google/cloud/ndb/version.py")) as fp:
+ version_candidates = re.findall(r"(?<=\")\d+\.\d+\.\d+(?=\")", fp.read())
+ assert len(version_candidates) == 1
+ version = version_candidates[0]
+
+packages = [
+ package
+ for package in setuptools.find_namespace_packages()
+ if package.startswith("google")
+]
+
+
+def main():
+ package_root = os.path.abspath(os.path.dirname(__file__))
+ readme_filename = os.path.join(package_root, "README.md")
+ with io.open(readme_filename, encoding="utf-8") as readme_file:
+ readme = readme_file.read()
+ dependencies = [
+ "google-api-core[grpc] >= 1.34.0, < 3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*",
+ "google-cloud-datastore >= 2.16.0, != 2.20.2, < 3.0.0",
+ "protobuf >= 3.20.2, < 7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5",
+ "pymemcache >= 2.1.0, < 5.0.0",
+ "pytz >= 2018.3",
+ "redis >= 3.0.0, < 7.0.0",
+ ]
+
+ setuptools.setup(
+ name="google-cloud-ndb",
+ version=version,
+ description="NDB library for Google Cloud Datastore",
+ long_description=readme,
+ long_description_content_type="text/markdown",
+ author="Google LLC",
+ author_email="googleapis-packages@google.com",
+ license="Apache 2.0",
+ url="https://github.com/googleapis/python-ndb",
+ project_urls={
+ "Documentation": "https://googleapis.dev/python/python-ndb/latest",
+ "Issue Tracker": "https://github.com/googleapis/python-ndb/issues",
+ },
+ classifiers=[
+ "Development Status :: 5 - Production/Stable",
+ "Intended Audience :: Developers",
+ "License :: OSI Approved :: Apache Software License",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.7",
+ "Programming Language :: Python :: 3.8",
+ "Programming Language :: Python :: 3.9",
+ "Programming Language :: Python :: 3.10",
+ "Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
+ "Programming Language :: Python :: 3.13",
+ "Programming Language :: Python :: 3.14",
+ "Operating System :: OS Independent",
+ "Topic :: Internet",
+ ],
+ platforms="Posix; MacOS X; Windows",
+ packages=packages,
+ install_requires=dependencies,
+ extras_require={},
+ python_requires=">=3.7",
+ include_package_data=False,
+ zip_safe=False,
+ )
+
+
+if __name__ == "__main__":
+ main()
diff --git a/packages/google-cloud-ndb/testing/constraints-3.10.txt b/packages/google-cloud-ndb/testing/constraints-3.10.txt
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/packages/google-cloud-ndb/testing/constraints-3.11.txt b/packages/google-cloud-ndb/testing/constraints-3.11.txt
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/packages/google-cloud-ndb/testing/constraints-3.12.txt b/packages/google-cloud-ndb/testing/constraints-3.12.txt
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/packages/google-cloud-ndb/testing/constraints-3.13.txt b/packages/google-cloud-ndb/testing/constraints-3.13.txt
new file mode 100644
index 000000000000..37fb0ed37393
--- /dev/null
+++ b/packages/google-cloud-ndb/testing/constraints-3.13.txt
@@ -0,0 +1,2 @@
+protobuf>=6
+redis>=6
diff --git a/packages/google-cloud-ndb/testing/constraints-3.14.txt b/packages/google-cloud-ndb/testing/constraints-3.14.txt
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git
a/packages/google-cloud-ndb/testing/constraints-3.7.txt b/packages/google-cloud-ndb/testing/constraints-3.7.txt new file mode 100644 index 000000000000..1ca48ea4fa16 --- /dev/null +++ b/packages/google-cloud-ndb/testing/constraints-3.7.txt @@ -0,0 +1,15 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List *all* library dependencies and extras in this file. +# Pin the version to the lower bound. +# +# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", +# Then this file should have foo==1.14.0 +google-cloud-datastore==2.16.0 +google-api-core==1.34.0 +protobuf==3.20.2 +pymemcache==2.1.0 +redis==3.0.0 +pytz==2018.3 +# TODO(https://github.com/googleapis/python-ndb/issues/913) remove this dependency once six is no longer used in the codebase +six==1.12.0 diff --git a/packages/google-cloud-ndb/testing/constraints-3.8.txt b/packages/google-cloud-ndb/testing/constraints-3.8.txt new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-ndb/testing/constraints-3.9.txt b/packages/google-cloud-ndb/testing/constraints-3.9.txt new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-ndb/tests/conftest.py b/packages/google-cloud-ndb/tests/conftest.py new file mode 100644 index 000000000000..c8d6b07dd358 --- /dev/null +++ b/packages/google-cloud-ndb/tests/conftest.py @@ -0,0 +1,151 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""py.test shared testing configuration. + +This defines fixtures (expected to be) shared across different test +modules. +""" + +import os + +from google.cloud import environment_vars +from google.cloud.ndb import context as context_module +from google.cloud.ndb import _eventloop +from google.cloud.ndb import global_cache as global_cache_module +from google.cloud.ndb import model +from google.cloud.ndb import utils + +import pytest + +from unittest import mock + +utils.DEBUG = True + + +class TestingEventLoop(_eventloop.EventLoop): + def call_soon(self, callback, *args, **kwargs): + """For testing, call the callback immediately.""" + callback(*args, **kwargs) + + +@pytest.fixture(autouse=True) +def reset_state(environ): + """Reset module and class level runtime state. + + To make sure that each test has the same starting conditions, we reset + module or class level datastructures that maintain runtime state. + + This resets: + + - ``model.Property._FIND_METHODS_CACHE`` + - ``model.Model._kind_map`` + """ + yield + model.Property._FIND_METHODS_CACHE.clear() + model.Model._kind_map.clear() + global_cache_module._InProcessGlobalCache.cache.clear() + + +@pytest.fixture +def environ(): + """Copy of ``os.environ``""" + original = os.environ + environ_copy = original.copy() + os.environ = environ_copy + yield environ_copy + os.environ = original + + +@pytest.fixture(autouse=True) +def initialize_environment(request, environ): + """Set environment variables to default values. 
+
+ There are some variables, like ``GOOGLE_APPLICATION_CREDENTIALS``, that we
+ want to reset for unit tests but not system tests. This fixture introspects
+ the current request, determines whether it's in a unit test or not, and
+ does the right thing.
+ """
+ if request.module.__name__.startswith("tests.unit"): # pragma: NO COVER
+ environ.pop(environment_vars.GCD_DATASET, None)
+ environ.pop(environment_vars.GCD_HOST, None)
+ environ.pop("GOOGLE_APPLICATION_CREDENTIALS", None)
+
+
+@pytest.fixture
+def context_factory():
+ def context(**kwargs):
+ client = mock.Mock(
+ project="testing",
+ database=None,
+ namespace=None,
+ spec=("project", "database", "namespace"),
+ stub=mock.Mock(spec=()),
+ )
+ context = context_module.Context(
+ client,
+ eventloop=TestingEventLoop(),
+ datastore_policy=True,
+ legacy_data=False,
+ **kwargs
+ )
+ return context
+
+ return context
+
+
+@pytest.fixture
+def context(context_factory):
+ return context_factory()
+
+
+@pytest.fixture
+def in_context(context):
+ assert not context_module._state.context
+ with context.use():
+ yield context
+ assert not context_module._state.context
+
+
+@pytest.fixture
+def database():
+ return "testdb"
+
+
+@pytest.fixture
+def namespace():
+ return "UnitTest"
+
+
+@pytest.fixture
+def client_context(namespace, database):
+ from google.cloud import ndb
+
+ client = ndb.Client()
+ context_manager = client.context(
+ cache_policy=False, legacy_data=False, database=database, namespace=namespace
+ )
+ with context_manager as context:
+ yield context
+
+
+@pytest.fixture
+def global_cache(context):
+ assert not context_module._state.context
+
+ cache = global_cache_module._InProcessGlobalCache()
+ with context.new(global_cache=cache).use():
+ yield cache
+
+ assert not context_module._state.context
diff --git a/packages/google-cloud-ndb/tests/pytest.ini b/packages/google-cloud-ndb/tests/pytest.ini
new file mode 100644
index 000000000000..15b7fe77b87c
--- /dev/null
+++ b/packages/google-cloud-ndb/tests/pytest.ini
@@ -0,0 +1,2 @@
+[pytest]
+addopts = --log-cli-level=WARN
diff --git a/packages/google-cloud-ndb/tests/system/__init__.py b/packages/google-cloud-ndb/tests/system/__init__.py
new file mode 100644
index 000000000000..b62228a35dc1
--- /dev/null
+++ b/packages/google-cloud-ndb/tests/system/__init__.py
@@ -0,0 +1,85 @@
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import functools
+import operator
+import time
+
+KIND = "SomeKind"
+OTHER_KIND = "OtherKind"
+
+
+def eventually(f, predicate, timeout=120, interval=2):
+ """Runs `f` in a loop, hoping for eventual success.
+
+ Some things we're trying to test in Datastore are eventually
+ consistent: we'll write something to the Datastore and can read the data
+ back out, eventually. This is particularly true for metadata, where we can
+ write an entity to Datastore and it takes some amount of time for metadata
+ about the entity's "kind" to update to match the new data just written,
+ which can be challenging for system testing.
+ + With `eventually`, you can pass in a callable `predicate` which can tell us + whether the Datastore is now in a consistent state, at least for the piece + we're trying to test. This function will call the predicate repeatedly in a + loop until it either returns `True` or `timeout` is exceeded. + + Args: + f (Callable[[], Any]): A function to be called. Its result will be + passed to ``predicate`` to determine success or failure. + predicate (Callable[[Any], bool]): A function to be called with the + result of calling ``f``. A return value of :data:`True` indicates a + consistent state and will cause `eventually` to return so execution + can proceed in the calling context. + timeout (float): Time in seconds to wait for predicate to return + `True`. After this amount of time, `eventually` will return + regardless of `predicate` return value. + interval (float): Time in seconds to wait in between invocations of + `predicate`. + + Returns: + Any: The return value of ``f``. + + Raises: + AssertionError: If ``predicate`` fails to return :data:`True` before + the timeout has expired. + """ + deadline = time.time() + timeout + while time.time() < deadline: + value = f() + if predicate(value): + return value + time.sleep(interval) + + assert predicate(value) + + +def length_equals(n): + """Returns predicate that returns True if passed a sequence of length `n`. + + For use with `eventually`. + """ + + def predicate(sequence): + return len(sequence) == n + + return predicate + + +def equals(n): + """Returns predicate that returns True if passed `n`. + + For use with `eventually`. + """ + return functools.partial(operator.eq, n) diff --git a/packages/google-cloud-ndb/tests/system/_helpers.py b/packages/google-cloud-ndb/tests/system/_helpers.py new file mode 100644 index 000000000000..26d3de77fca8 --- /dev/null +++ b/packages/google-cloud-ndb/tests/system/_helpers.py @@ -0,0 +1,18 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from os import getenv + +_DATASTORE_DATABASE = "SYSTEM_TESTS_DATABASE" +TEST_DATABASE = getenv(_DATASTORE_DATABASE, "system-tests-named-db") diff --git a/packages/google-cloud-ndb/tests/system/conftest.py b/packages/google-cloud-ndb/tests/system/conftest.py new file mode 100644 index 000000000000..82e61762f2e1 --- /dev/null +++ b/packages/google-cloud-ndb/tests/system/conftest.py @@ -0,0 +1,200 @@ +import itertools +import logging +import os +import uuid + +import pytest +import requests + +from google.cloud import datastore +from google.cloud import ndb + +from google.cloud.ndb import global_cache as global_cache_module + +from . 
import KIND, OTHER_KIND, _helpers + +log = logging.getLogger(__name__) + + +@pytest.fixture(scope="session", autouse=True) +def preclean(): + """Clean out default namespace in test database.""" + _preclean(None, None) + if _helpers.TEST_DATABASE: + _preclean(_helpers.TEST_DATABASE, None) + + +def _preclean(database, namespace): + ds_client = _make_ds_client(database, namespace) + for kind in (KIND, OTHER_KIND): + query = ds_client.query(kind=kind) + query.keys_only() + for page in query.fetch().pages: + keys = [entity.key for entity in page] + ds_client.delete_multi(keys) + + +def _make_ds_client(database, namespace): + emulator = bool(os.environ.get("DATASTORE_EMULATOR_HOST")) + if emulator: + client = datastore.Client( + database=database, namespace=namespace, _http=requests.Session + ) + else: + client = datastore.Client(database=database, namespace=namespace) + + assert client.database == database + assert client.namespace == namespace + + return client + + +def all_entities(client, other_namespace): + return itertools.chain( + client.query(kind=KIND).fetch(), + client.query(kind=OTHER_KIND).fetch(), + client.query(namespace=other_namespace).fetch(), + ) + + +@pytest.fixture(scope="session") +def deleted_keys(): + return set() + + +@pytest.fixture +def to_delete(): + return [] + + +@pytest.fixture +def ds_client(database_id, namespace): + client = _make_ds_client(database_id, namespace) + assert client.database == database_id + assert client.namespace == namespace + return client + + +@pytest.fixture +def with_ds_client(ds_client, to_delete, deleted_keys, other_namespace): + yield ds_client + + # Clean up after ourselves + while to_delete: + batch = to_delete[:500] + ds_client.delete_multi(batch) + deleted_keys.update(batch) + to_delete = to_delete[500:] + + not_deleted = [ + entity + for entity in all_entities(ds_client, other_namespace) + if fix_key_db(entity.key, ds_client) not in deleted_keys + ] + if not_deleted: + log.warning("CLEAN UP: Entities not deleted from test: {}".format(not_deleted)) + + +@pytest.fixture +def ds_entity(with_ds_client, dispose_of): + def make_entity(*key_args, **entity_kwargs): + key = with_ds_client.key(*key_args) + assert with_ds_client.get(key) is None + entity = datastore.Entity(key=key) + entity.update(entity_kwargs) + with_ds_client.put(entity) + dispose_of(key) + + return entity + + yield make_entity + + +@pytest.fixture +def ds_entity_with_meanings(with_ds_client, dispose_of): + def make_entity(*key_args, **entity_kwargs): + meanings = key_args[0] + key = with_ds_client.key(*key_args[1:]) + assert with_ds_client.get(key) is None + entity = datastore.Entity(key=key, exclude_from_indexes=("blob",)) + entity._meanings = meanings + entity.update(entity_kwargs) + with_ds_client.put(entity) + dispose_of(key) + + return entity + + yield make_entity + + +# Workaround: datastore batches reject if key.database is None and client.database == "" +# or vice-versa. 
This should be fixed, but for now just fix the keys +# See https://github.com/googleapis/python-datastore/issues/460 +def fix_key_db(key, database): + if key.database: + return key + else: + fixed_key = key.__class__( + *key.flat_path, + project=key.project, + database=database, + namespace=key.namespace + ) + # If the current parent has already been set, we re-use + # the same instance + fixed_key._parent = key._parent + return fixed_key + + +@pytest.fixture +def dispose_of(with_ds_client, to_delete): + def delete_entity(*ds_keys): + to_delete.extend( + map(lambda key: fix_key_db(key, with_ds_client.database), ds_keys) + ) + + return delete_entity + + +@pytest.fixture(params=["", _helpers.TEST_DATABASE]) +def database_id(request): + return request.param + + +@pytest.fixture +def namespace(): + return str(uuid.uuid4()) + + +@pytest.fixture +def other_namespace(): + return str(uuid.uuid4()) + + +@pytest.fixture +def client_context(database_id, namespace): + client = ndb.Client(database=database_id) + assert client.database == database_id + context_manager = client.context( + cache_policy=False, + legacy_data=False, + namespace=namespace, + ) + with context_manager as context: + yield context + + +@pytest.fixture +def redis_context(client_context): + global_cache = global_cache_module.RedisCache.from_environment() + with client_context.new(global_cache=global_cache).use() as context: + context.set_global_cache_policy(None) # Use default + yield context + + +@pytest.fixture +def memcache_context(client_context): + global_cache = global_cache_module.MemcacheCache.from_environment() + with client_context.new(global_cache=global_cache).use() as context: + context.set_global_cache_policy(None) # Use default + yield context diff --git a/packages/google-cloud-ndb/tests/system/index.yaml b/packages/google-cloud-ndb/tests/system/index.yaml new file mode 100644 index 000000000000..1316f17b933d --- /dev/null +++ b/packages/google-cloud-ndb/tests/system/index.yaml @@ -0,0 +1,33 @@ +indexes: + +- kind: SomeKind + properties: + - name: bar + - name: foo + +- kind: SomeKind + properties: + - name: foo + - name: bar + +- kind: SomeKind + properties: + - name: bar.one + - name: bar.two + - name: foo + +- kind: SomeKind + properties: + - name: bar.three + - name: foo + +- kind: SomeKind + properties: + - name: foo + - name: bar.one + - name: bar.two + +- kind: Animal + properties: + - name: class + - name: foo diff --git a/packages/google-cloud-ndb/tests/system/test_crud.py b/packages/google-cloud-ndb/tests/system/test_crud.py new file mode 100644 index 000000000000..66d7d1dce830 --- /dev/null +++ b/packages/google-cloud-ndb/tests/system/test_crud.py @@ -0,0 +1,1962 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +System tests for Create, Update, Delete. 
(CRUD) +""" +import datetime +import os +import pickle +import pytz +import random +import threading +import zlib + +from unittest import mock + +import pytest + +import test_utils.system + +from google.cloud import ndb +from google.cloud.ndb import _cache +from google.cloud.ndb import global_cache as global_cache_module + +from . import KIND, eventually, equals + +USE_REDIS_CACHE = bool(os.environ.get("REDIS_CACHE_URL")) +USE_MEMCACHE = bool(os.environ.get("MEMCACHED_HOSTS")) + + +def _assert_contemporaneous(timestamp1, timestamp2, delta_margin=2): + delta_margin = datetime.timedelta(seconds=delta_margin) + assert delta_margin > abs(timestamp1 - timestamp2) + + +@pytest.mark.usefixtures("client_context") +def test_retrieve_entity(ds_entity): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=42, bar="none", baz=b"night") + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StringProperty() + baz = ndb.StringProperty() + + key = ndb.Key(KIND, entity_id) + entity = key.get() + assert isinstance(entity, SomeKind) + assert entity.foo == 42 + assert entity.bar == "none" + assert entity.baz == "night" + + +def test_retrieve_entity_with_caching(ds_entity, client_context): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=42, bar="none", baz=b"night") + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StringProperty() + baz = ndb.StringProperty() + + client_context.set_cache_policy(None) # Use default + + key = ndb.Key(KIND, entity_id) + entity = key.get() + assert isinstance(entity, SomeKind) + assert entity.foo == 42 + assert entity.bar == "none" + assert entity.baz == "night" + + assert key.get() is entity + + +def test_retrieve_entity_with_global_cache(ds_entity, client_context): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=42, bar="none", baz=b"night") + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StringProperty() + baz = ndb.StringProperty() + + global_cache = global_cache_module._InProcessGlobalCache() + with client_context.new(global_cache=global_cache).use() as context: + context.set_global_cache_policy(None) # Use default + + key = ndb.Key(KIND, entity_id) + entity = key.get() + assert isinstance(entity, SomeKind) + assert entity.foo == 42 + assert entity.bar == "none" + assert entity.baz == "night" + + cache_key = _cache.global_cache_key(key._key) + cache_value = global_cache.get([cache_key])[0] + assert cache_value + assert not _cache.is_locked_value(cache_value) + + patch = mock.patch( + "google.cloud.ndb._datastore_api._LookupBatch.add", + mock.Mock(side_effect=Exception("Shouldn't call this")), + ) + with patch: + entity = key.get() + assert isinstance(entity, SomeKind) + assert entity.foo == 42 + assert entity.bar == "none" + assert entity.baz == "night" + + +@pytest.mark.skipif(not USE_REDIS_CACHE, reason="Redis is not configured") +def test_retrieve_entity_with_redis_cache(ds_entity, redis_context): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=42, bar="none", baz=b"night") + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StringProperty() + baz = ndb.StringProperty() + + key = ndb.Key(KIND, entity_id) + entity = key.get() + assert isinstance(entity, SomeKind) + assert entity.foo == 42 + assert entity.bar == "none" + assert entity.baz == "night" + + cache_key = _cache.global_cache_key(key._key) + cache_value = 
redis_context.global_cache.redis.get(cache_key) + assert cache_value + assert not _cache.is_locked_value(cache_value) + + patch = mock.patch( + "google.cloud.ndb._datastore_api._LookupBatch.add", + mock.Mock(side_effect=Exception("Shouldn't call this")), + ) + with patch: + entity = key.get() + assert isinstance(entity, SomeKind) + assert entity.foo == 42 + assert entity.bar == "none" + assert entity.baz == "night" + + +@pytest.mark.skipif(not USE_MEMCACHE, reason="Memcache is not configured") +def test_retrieve_entity_with_memcache(ds_entity, memcache_context): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=42, bar="none", baz=b"night") + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StringProperty() + baz = ndb.StringProperty() + + key = ndb.Key(KIND, entity_id) + entity = key.get() + assert isinstance(entity, SomeKind) + assert entity.foo == 42 + assert entity.bar == "none" + assert entity.baz == "night" + + cache_key = _cache.global_cache_key(key._key) + cache_key = global_cache_module.MemcacheCache._key(cache_key) + cache_value = memcache_context.global_cache.client.get(cache_key) + assert cache_value + assert not _cache.is_locked_value(cache_value) + + patch = mock.patch( + "google.cloud.ndb._datastore_api._LookupBatch.add", + mock.Mock(side_effect=Exception("Shouldn't call this")), + ) + with patch: + entity = key.get() + assert isinstance(entity, SomeKind) + assert entity.foo == 42 + assert entity.bar == "none" + assert entity.baz == "night" + + +@pytest.mark.usefixtures("client_context") +def test_retrieve_entity_not_found(ds_entity): + entity_id = test_utils.system.unique_resource_id() + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StringProperty() + + key = ndb.Key(KIND, entity_id) + assert key.get() is None + + +@pytest.mark.usefixtures("client_context") +def test_nested_tasklet(ds_entity): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=42, bar="none") + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StringProperty() + + @ndb.tasklet + def get_foo(key): + entity = yield key.get_async() + raise ndb.Return(entity.foo) + + key = ndb.Key(KIND, entity_id) + assert get_foo(key).result() == 42 + + +@pytest.mark.usefixtures("client_context") +def test_retrieve_two_entities_in_parallel(ds_entity): + entity1_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity1_id, foo=42, bar="none") + entity2_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity2_id, foo=65, bar="naan") + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StringProperty() + + key1 = ndb.Key(KIND, entity1_id) + key2 = ndb.Key(KIND, entity2_id) + + @ndb.tasklet + def get_two_entities(): + entity1, entity2 = yield key1.get_async(), key2.get_async() + raise ndb.Return(entity1, entity2) + + entity1, entity2 = get_two_entities().result() + + assert isinstance(entity1, SomeKind) + assert entity1.foo == 42 + assert entity1.bar == "none" + + assert isinstance(entity2, SomeKind) + assert entity2.foo == 65 + assert entity2.bar == "naan" + + +@pytest.mark.usefixtures("client_context") +def test_retrieve_entities_in_parallel_nested(ds_entity): + """Regression test for #357. 
+ + https://github.com/googleapis/python-ndb/issues/357 + """ + entity1_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity1_id, foo=42, bar="none") + entity2_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity2_id, foo=65, bar="naan") + entity3_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity3_id, foo=66, bar="route") + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StringProperty() + + key1 = ndb.Key(KIND, entity1_id) + key2 = ndb.Key(KIND, entity2_id) + key3 = ndb.Key(KIND, entity3_id) + + @ndb.tasklet + def get_two_entities(): + entity1, (entity2, entity3) = yield ( + key1.get_async(), + [key2.get_async(), key3.get_async()], + ) + raise ndb.Return(entity1, entity2, entity3) + + entity1, entity2, entity3 = get_two_entities().result() + + assert isinstance(entity1, SomeKind) + assert entity1.foo == 42 + assert entity1.bar == "none" + + assert isinstance(entity2, SomeKind) + assert entity2.foo == 65 + assert entity2.bar == "naan" + + assert isinstance(entity3, SomeKind) + assert entity3.foo == 66 + assert entity3.bar == "route" + + +@pytest.mark.usefixtures("client_context") +def test_insert_entity(dispose_of, ds_client): + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StringProperty() + + entity = SomeKind(foo=42, bar="none") + key = entity.put() + dispose_of(key._key) + + retrieved = key.get() + assert retrieved.foo == 42 + assert retrieved.bar == "none" + + # Make sure strings are stored as strings in datastore + ds_entity = ds_client.get(key._key) + assert ds_entity["bar"] == "none" + + +@pytest.mark.usefixtures("client_context") +def test_insert_entity_with_stored_name_property(dispose_of, ds_client): + class SomeKind(ndb.Model): + foo = ndb.StringProperty() + bar = ndb.StringProperty(name="notbar") + + entity = SomeKind(foo="something", bar="or other") + key = entity.put() + dispose_of(key._key) + + retrieved = key.get() + assert retrieved.foo == "something" + assert retrieved.bar == "or other" + + ds_entity = ds_client.get(key._key) + assert ds_entity["notbar"] == "or other" + + +@pytest.mark.usefixtures("client_context") +def test_insert_roundtrip_naive_datetime(dispose_of, ds_client): + class SomeKind(ndb.Model): + foo = ndb.DateTimeProperty() + + entity = SomeKind(foo=datetime.datetime(2010, 5, 12, 2, 42)) + key = entity.put() + dispose_of(key._key) + + retrieved = key.get() + assert retrieved.foo == datetime.datetime(2010, 5, 12, 2, 42) + + +@pytest.mark.usefixtures("client_context") +def test_datetime_w_tzinfo(dispose_of, ds_client): + class timezone(datetime.tzinfo): + def __init__(self, offset): + self.offset = datetime.timedelta(hours=offset) + + def utcoffset(self, dt): + return self.offset + + def dst(self, dt): + return datetime.timedelta(0) + + mytz = timezone(-4) + + class SomeKind(ndb.Model): + foo = ndb.DateTimeProperty(tzinfo=mytz) + bar = ndb.DateTimeProperty(tzinfo=mytz) + + entity = SomeKind( + foo=datetime.datetime(2010, 5, 12, 2, 42, tzinfo=timezone(-5)), + bar=datetime.datetime(2010, 5, 12, 2, 42), + ) + key = entity.put() + dispose_of(key._key) + + retrieved = key.get() + assert retrieved.foo == datetime.datetime(2010, 5, 12, 3, 42, tzinfo=mytz) + assert retrieved.bar == datetime.datetime(2010, 5, 11, 22, 42, tzinfo=mytz) + + +def test_parallel_threads(dispose_of, database_id, namespace): + client = ndb.Client(database=database_id, namespace=namespace) + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StringProperty() + + def 
insert(foo): + with client.context(cache_policy=False): + entity = SomeKind(foo=foo, bar="none") + + key = entity.put() + dispose_of(key._key) + + retrieved = key.get() + assert retrieved.foo == foo + assert retrieved.bar == "none" + + thread1 = threading.Thread(target=insert, args=[42], name="one") + thread2 = threading.Thread(target=insert, args=[144], name="two") + + thread1.start() + thread2.start() + + thread1.join() + thread2.join() + + +@pytest.mark.usefixtures("client_context") +def test_large_rpc_lookup(dispose_of, ds_client): + class SomeKind(ndb.Model): + foo = ndb.TextProperty() + + foo = "a" * (500 * 1024) + + keys = [] + for i in range(15): + key = SomeKind(foo=foo).put() + dispose_of(key._key) + keys.append(key) + + retrieved = ndb.get_multi(keys) + for entity in retrieved: + assert entity.foo == foo + + +@pytest.mark.usefixtures("client_context") +def test_large_json_property(dispose_of, ds_client): + class SomeKind(ndb.Model): + foo = ndb.JsonProperty() + + foo = {str(i): i for i in range(500)} + entity = SomeKind(foo=foo) + key = entity.put() + dispose_of(key._key) + + retrieved = key.get() + assert retrieved.foo == foo + + +@pytest.mark.usefixtures("client_context") +def test_compressed_json_property(dispose_of, ds_client): + class SomeKind(ndb.Model): + foo = ndb.JsonProperty(compressed=True) + + foo = {str(i): i for i in range(500)} + entity = SomeKind(foo=foo) + key = entity.put() + dispose_of(key._key) + + retrieved = key.get() + assert retrieved.foo == foo + + +@pytest.mark.usefixtures("client_context") +def test_compressed_blob_property(dispose_of, ds_client): + class SomeKind(ndb.Model): + foo = ndb.BlobProperty(compressed=True) + + foo = b"abc" * 100 + entity = SomeKind(foo=foo) + key = entity.put() + dispose_of(key._key) + + retrieved = key.get() + assert retrieved.foo == foo + + +@pytest.mark.usefixtures("client_context") +def test_compressed_repeated_local_structured_property(dispose_of, ds_client): + class Dog(ndb.Model): + name = ndb.StringProperty() + + class House(ndb.Model): + dogs = ndb.LocalStructuredProperty(Dog, repeated=True, compressed=True) + + entity = House() + dogs = [Dog(name="Mika"), Dog(name="Mocha")] + entity.dogs = dogs + + key = entity.put() + dispose_of(key._key) + + retrieved = key.get() + assert retrieved.dogs == dogs + + +def test_get_by_id_with_compressed_repeated_local_structured_property( + client_context, dispose_of, ds_client +): + class Dog(ndb.Model): + name = ndb.TextProperty() + + class House(ndb.Model): + dogs = ndb.LocalStructuredProperty(Dog, repeated=True, compressed=True) + + with client_context.new(legacy_data=True).use(): + entity = House() + dogs = [Dog(name="Mika"), Dog(name="Mocha")] + entity.dogs = dogs + + key = entity.put() + house_id = key.id() + dispose_of(key._key) + + retrieved = House.get_by_id(house_id) + assert retrieved.dogs == dogs + + +@pytest.mark.usefixtures("client_context") +def test_retrieve_entity_with_legacy_compressed_property( + ds_entity_with_meanings, +): + class SomeKind(ndb.Model): + blob = ndb.BlobProperty() + + value = b"abc" * 1000 + compressed_value = zlib.compress(value) + entity_id = test_utils.system.unique_resource_id() + ds_entity_with_meanings( + {"blob": (22, compressed_value)}, KIND, entity_id, **{"blob": compressed_value} + ) + + key = ndb.Key(KIND, entity_id) + retrieved = key.get() + assert retrieved.blob == value + + +@pytest.mark.usefixtures("client_context") +def test_large_pickle_property(dispose_of, ds_client): + class SomeKind(ndb.Model): + foo = ndb.PickleProperty() + + 
foo = {str(i): i for i in range(500)} + entity = SomeKind(foo=foo) + key = entity.put() + dispose_of(key._key) + + retrieved = key.get() + assert retrieved.foo == foo + + +@pytest.mark.usefixtures("client_context") +def test_key_property(dispose_of, ds_client): + class SomeKind(ndb.Model): + foo = ndb.KeyProperty() + + key_value = ndb.Key("Whatevs", 123) + entity = SomeKind(foo=key_value) + key = entity.put() + dispose_of(key._key) + + retrieved = key.get() + assert retrieved.foo == key_value + + +@pytest.mark.usefixtures("client_context") +def test_multiple_key_properties(dispose_of, ds_client): + class SomeKind(ndb.Model): + foo = ndb.KeyProperty(kind="Whatevs") + bar = ndb.KeyProperty(kind="Whatevs") + + foo = ndb.Key("Whatevs", 123) + bar = ndb.Key("Whatevs", 321) + entity = SomeKind(foo=foo, bar=bar) + key = entity.put() + dispose_of(key._key) + + retrieved = key.get() + assert retrieved.foo == foo + assert retrieved.bar == bar + assert retrieved.foo != retrieved.bar + + +def test_insert_entity_with_caching(client_context): + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StringProperty() + + client_context.set_cache_policy(None) # Use default + + entity = SomeKind(foo=42, bar="none") + key = entity.put() + + with client_context.new(cache_policy=False).use(): + # Sneaky. Delete entity out from under cache so we know we're getting + # cached copy. + key.delete() + eventually(key.get, equals(None)) + + retrieved = key.get() + assert retrieved.foo == 42 + assert retrieved.bar == "none" + + +def test_insert_entity_with_global_cache(dispose_of, client_context): + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StringProperty() + + global_cache = global_cache_module._InProcessGlobalCache() + with client_context.new(global_cache=global_cache).use() as context: + context.set_global_cache_policy(None) # Use default + + entity = SomeKind(foo=42, bar="none") + key = entity.put() + dispose_of(key._key) + cache_key = _cache.global_cache_key(key._key) + cache_value = global_cache.get([cache_key])[0] + assert not cache_value + + retrieved = key.get() + assert retrieved.foo == 42 + assert retrieved.bar == "none" + + cache_value = global_cache.get([cache_key])[0] + assert cache_value + assert not _cache.is_locked_value(cache_value) + + entity.foo = 43 + entity.put() + + cache_value = global_cache.get([cache_key])[0] + assert not cache_value + + +def test_insert_entity_with_use_global_cache_false(dispose_of, client_context): + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StringProperty() + + global_cache = global_cache_module._InProcessGlobalCache() + with client_context.new(global_cache=global_cache).use() as context: + context.set_global_cache_policy(None) # Use default + + entity = SomeKind(foo=42, bar="none") + key = entity.put(use_global_cache=False) + dispose_of(key._key) + cache_key = _cache.global_cache_key(key._key) + cache_value = global_cache.get([cache_key])[0] + assert not cache_value + + retrieved = key.get(use_global_cache=False) + assert retrieved.foo == 42 + assert retrieved.bar == "none" + + cache_value = global_cache.get([cache_key])[0] + assert not cache_value + + entity.foo = 43 + entity.put(use_global_cache=False) + + cache_value = global_cache.get([cache_key])[0] + assert not cache_value + + +@pytest.mark.skipif(not USE_REDIS_CACHE, reason="Redis is not configured") +def test_insert_entity_with_redis_cache(dispose_of, redis_context): + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = 
ndb.StringProperty() + + entity = SomeKind(foo=42, bar="none") + key = entity.put() + dispose_of(key._key) + cache_key = _cache.global_cache_key(key._key) + cache_value = redis_context.global_cache.redis.get(cache_key) + assert not cache_value + + retrieved = key.get() + assert retrieved.foo == 42 + assert retrieved.bar == "none" + + cache_value = redis_context.global_cache.redis.get(cache_key) + assert cache_value + assert not _cache.is_locked_value(cache_value) + + entity.foo = 43 + entity.put() + + cache_value = redis_context.global_cache.redis.get(cache_key) + assert not cache_value + + +@pytest.mark.skipif(not USE_MEMCACHE, reason="Memcache is not configured") +def test_insert_entity_with_memcache(dispose_of, memcache_context): + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StringProperty() + + entity = SomeKind(foo=42, bar="none") + key = entity.put() + dispose_of(key._key) + cache_key = _cache.global_cache_key(key._key) + cache_key = global_cache_module.MemcacheCache._key(cache_key) + cache_value = memcache_context.global_cache.client.get(cache_key) + assert not cache_value + + retrieved = key.get() + assert retrieved.foo == 42 + assert retrieved.bar == "none" + + cache_value = memcache_context.global_cache.client.get(cache_key) + assert cache_value + assert not _cache.is_locked_value(cache_value) + + entity.foo = 43 + entity.put() + + cache_value = memcache_context.global_cache.client.get(cache_key) + assert not cache_value + + +@pytest.mark.usefixtures("client_context") +def test_update_entity(ds_entity): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=42, bar="none") + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StringProperty() + + key = ndb.Key(KIND, entity_id) + entity = key.get() + entity.foo = 56 + entity.bar = "high" + assert entity.put() == key + + retrieved = key.get() + assert retrieved.foo == 56 + assert retrieved.bar == "high" + + +@pytest.mark.usefixtures("client_context") +def test_insert_entity_in_transaction(dispose_of): + commit_callback = mock.Mock() + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StringProperty() + + def save_entity(): + ndb.get_context().call_on_commit(commit_callback) + entity = SomeKind(foo=42, bar="none") + key = entity.put() + dispose_of(key._key) + return key + + key = ndb.transaction(save_entity) + retrieved = key.get() + assert retrieved.foo == 42 + assert retrieved.bar == "none" + commit_callback.assert_called_once_with() + + +@pytest.mark.usefixtures("client_context") +def test_update_entity_in_transaction(ds_entity): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=42, bar="none") + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StringProperty() + + def update_entity(): + key = ndb.Key(KIND, entity_id) + entity = key.get() + entity.foo = 56 + entity.bar = "high" + assert entity.put() == key + return key + + key = ndb.transaction(update_entity) + retrieved = key.get() + assert retrieved.foo == 56 + assert retrieved.bar == "high" + + +@pytest.mark.usefixtures("client_context") +def test_parallel_transactions(): + def task(delay): + @ndb.tasklet + def callback(): + transaction = ndb.get_context().transaction + yield ndb.sleep(delay) + assert ndb.get_context().transaction == transaction + raise ndb.Return(transaction) + + return callback + + future1 = ndb.transaction_async(task(0.1)) + future2 = ndb.transaction_async(task(0.06)) + ndb.wait_all((future1, future2)) + 
assert future1.get_result() != future2.get_result() + + +@pytest.mark.usefixtures("client_context") +def test_delete_entity(ds_entity): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=42) + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + key = ndb.Key(KIND, entity_id) + assert key.get().foo == 42 + + assert key.delete() is None + assert key.get() is None + assert key.delete() is None + + +def test_delete_entity_with_caching(ds_entity, client_context): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=42) + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + client_context.set_cache_policy(None) # Use default + + key = ndb.Key(KIND, entity_id) + assert key.get().foo == 42 + + assert key.delete() is None + assert key.get() is None + assert key.delete() is None + + +def test_delete_entity_with_global_cache(ds_entity, client_context): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=42) + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + key = ndb.Key(KIND, entity_id) + cache_key = _cache.global_cache_key(key._key) + global_cache = global_cache_module._InProcessGlobalCache() + + with client_context.new(global_cache=global_cache).use(): + assert key.get().foo == 42 + cache_value = global_cache.get([cache_key])[0] + assert cache_value + assert not _cache.is_locked_value(cache_value) + + assert key.delete() is None + cache_value = global_cache.get([cache_key])[0] + assert not cache_value + + # This is py27 behavior. Not entirely sold on leaving _LOCKED value for + # Datastore misses. + assert key.get() is None + cache_value = global_cache.get([cache_key])[0] + assert _cache.is_locked_value(cache_value) + + +@pytest.mark.skipif(not USE_REDIS_CACHE, reason="Redis is not configured") +def test_delete_entity_with_redis_cache(ds_entity, redis_context): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=42) + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + key = ndb.Key(KIND, entity_id) + cache_key = _cache.global_cache_key(key._key) + + assert key.get().foo == 42 + cache_value = redis_context.global_cache.redis.get(cache_key) + assert cache_value + assert not _cache.is_locked_value(cache_value) + + assert key.delete() is None + cache_value = redis_context.global_cache.redis.get(cache_key) + assert not cache_value + + # This is py27 behavior. Not entirely sold on leaving _LOCKED value for + # Datastore misses. + assert key.get() is None + cache_value = redis_context.global_cache.redis.get(cache_key) + assert _cache.is_locked_value(cache_value) + + +@pytest.mark.skipif(not USE_MEMCACHE, reason="Memcache is not configured") +def test_delete_entity_with_memcache(ds_entity, memcache_context): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=42) + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + key = ndb.Key(KIND, entity_id) + cache_key = _cache.global_cache_key(key._key) + cache_key = global_cache_module.MemcacheCache._key(cache_key) + + assert key.get().foo == 42 + cache_value = memcache_context.global_cache.client.get(cache_key) + assert cache_value + assert not _cache.is_locked_value(cache_value) + + assert key.delete() is None + cache_value = memcache_context.global_cache.client.get(cache_key) + assert not cache_value + + # This is py27 behavior. Not entirely sold on leaving _LOCKED value for + # Datastore misses. 
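+    # (Editor's note, hedged: a Datastore miss appears to leave a lock
+    # sentinel in the global cache rather than caching the miss itself, which
+    # is why the assertion below expects a locked value, not an empty slot.)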
+ assert key.get() is None + cache_value = memcache_context.global_cache.client.get(cache_key) + assert _cache.is_locked_value(cache_value) + + +@pytest.mark.usefixtures("client_context") +def test_delete_entity_in_transaction(ds_entity): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=42) + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + key = ndb.Key(KIND, entity_id) + assert key.get().foo == 42 + + def delete_entity(): + assert key.delete() is None + assert key.get().foo == 42 # not deleted until commit + + ndb.transaction(delete_entity) + assert key.get() is None + + +def test_delete_entity_in_transaction_with_global_cache(client_context, ds_entity): + """Regression test for #426 + + https://github.com/googleapis/python-ndb/issues/426 + """ + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=42) + + global_cache = global_cache_module._InProcessGlobalCache() + with client_context.new(global_cache=global_cache).use(): + key = ndb.Key(KIND, entity_id) + assert key.get().foo == 42 + + ndb.transaction(key.delete) + assert key.get() is None + + +@pytest.mark.usefixtures("client_context") +def test_delete_entity_in_transaction_then_rollback(ds_entity): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=42) + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + key = ndb.Key(KIND, entity_id) + assert key.get().foo == 42 + + def delete_entity(): + assert key.delete() is None + raise Exception("Spurious error") + + with pytest.raises(Exception): + ndb.transaction(delete_entity) + + assert key.get().foo == 42 + + +@pytest.mark.usefixtures("client_context") +def test_allocate_ids(): + class SomeKind(ndb.Model): + pass + + keys = SomeKind.allocate_ids(5) + assert len(keys) == 5 + + for key in keys: + assert key.id() + assert key.get() is None + + +@pytest.mark.usefixtures("client_context") +def test_get_by_id(ds_entity): + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=42) + + key = ndb.Key(KIND, entity_id) + assert key.get().foo == 42 + + entity = SomeKind.get_by_id(entity_id) + assert entity.foo == 42 + + +@pytest.mark.usefixtures("client_context") +def test_get_or_insert_get(ds_entity): + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + name = "Inigo Montoya" + assert SomeKind.get_by_id(name) is None + + ds_entity(KIND, name, foo=42) + entity = SomeKind.get_or_insert(name, foo=21) + assert entity.foo == 42 + + +@pytest.mark.usefixtures("client_context") +def test_get_or_insert_insert(dispose_of): + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + name = "Inigo Montoya" + assert SomeKind.get_by_id(name) is None + + entity = SomeKind.get_or_insert(name, foo=21) + dispose_of(entity._key._key) + assert entity.foo == 21 + + +@pytest.mark.usefixtures("client_context") +def test_get_or_insert_in_transaction(dispose_of): + """Regression test for #433 + + https://github.com/googleapis/python-ndb/issues/433 + """ + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + name = "Inigo Montoya" + assert SomeKind.get_by_id(name) is None + + @ndb.transactional() + def do_the_thing(foo): + entity = SomeKind.get_or_insert(name, foo=foo) + return entity + + entity = do_the_thing(42) + dispose_of(entity._key._key) + assert entity.foo == 42 + + entity = do_the_thing(21) + assert entity.foo == 42 
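+
+
+# Editor's sketch (illustrative, not part of the upstream suite): this is how
+# the `eventually` and `length_equals` helpers from tests/system/__init__.py
+# combine to wait out eventually consistent queries. The model name and the
+# entity count here are hypothetical; the underscore prefix keeps pytest from
+# collecting the sketch as a test.
+def _sketch_wait_for_consistent_query(dispose_of):
+    from . import length_equals
+
+    class SketchKind(ndb.Model):
+        foo = ndb.IntegerProperty()
+
+    keys = ndb.put_multi([SketchKind(foo=i) for i in range(3)])
+    for key in keys:
+        dispose_of(key._key)
+
+    # Poll the query until all three entities are visible; `eventually` raises
+    # AssertionError if they don't appear before its default timeout.
+    return eventually(ndb.Query(kind="SketchKind").fetch, length_equals(3))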
+ + +def test_get_by_id_default_namespace_when_context_namespace_is_other( + client_context, dispose_of, other_namespace +): + """Regression test for #535. + + https://github.com/googleapis/python-ndb/issues/535 + """ + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + entity1 = SomeKind(foo=1, id="x", namespace="") + entity1.put() + dispose_of(entity1.key._key) + + with client_context.new(namespace=other_namespace).use(): + result = SomeKind.get_by_id("x", namespace="") + + assert result is not None + assert result.foo == 1 + + +def test_get_or_insert_default_namespace_when_context_namespace_is_other( + client_context, dispose_of, other_namespace +): + """Regression test for #535. + + https://github.com/googleapis/python-ndb/issues/535 + """ + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + with client_context.new(namespace=other_namespace).use(): + SomeKind.get_or_insert("x", namespace="", foo=1) + result = SomeKind.get_by_id("x", namespace="") + + assert result is not None + assert result.foo == 1 + + +@pytest.mark.usefixtures("client_context") +def test_insert_entity_with_structured_property(dispose_of): + class OtherKind(ndb.Model): + one = ndb.StringProperty() + two = ndb.StringProperty() + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StructuredProperty(OtherKind) + + entity = SomeKind(foo=42, bar=OtherKind(one="hi", two="mom")) + key = entity.put() + dispose_of(key._key) + + retrieved = key.get() + assert retrieved.foo == 42 + assert retrieved.bar.one == "hi" + assert retrieved.bar.two == "mom" + + assert isinstance(retrieved.bar, OtherKind) + + +def test_insert_entity_with_structured_property_legacy_data( + client_context, dispose_of, ds_client +): + class OtherKind(ndb.Model): + one = ndb.StringProperty() + two = ndb.StringProperty() + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StructuredProperty(OtherKind) + + with client_context.new(legacy_data=True).use(): + entity = SomeKind(foo=42, bar=OtherKind(one="hi", two="mom")) + key = entity.put() + dispose_of(key._key) + + retrieved = key.get() + assert retrieved.foo == 42 + assert retrieved.bar.one == "hi" + assert retrieved.bar.two == "mom" + + assert isinstance(retrieved.bar, OtherKind) + + ds_entity = ds_client.get(key._key) + assert ds_entity["foo"] == 42 + assert ds_entity["bar.one"] == "hi" + assert ds_entity["bar.two"] == "mom" + + +@pytest.mark.usefixtures("client_context") +def test_retrieve_entity_with_legacy_structured_property(ds_entity): + class OtherKind(ndb.Model): + one = ndb.StringProperty() + two = ndb.StringProperty() + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StructuredProperty(OtherKind) + + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, **{"foo": 42, "bar.one": "hi", "bar.two": "mom"}) + + key = ndb.Key(KIND, entity_id) + retrieved = key.get() + assert retrieved.foo == 42 + assert retrieved.bar.one == "hi" + assert retrieved.bar.two == "mom" + + assert isinstance(retrieved.bar, OtherKind) + + +@pytest.mark.usefixtures("client_context") +def test_retrieve_entity_with_legacy_repeated_structured_property(ds_entity): + class OtherKind(ndb.Model): + one = ndb.StringProperty() + two = ndb.StringProperty() + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StructuredProperty(OtherKind, repeated=True) + + entity_id = test_utils.system.unique_resource_id() + ds_entity( + KIND, + entity_id, + **{"foo": 42, "bar.one": ["hi", "hello"], "bar.two": ["mom", 
"dad"]} + ) + + key = ndb.Key(KIND, entity_id) + retrieved = key.get() + assert retrieved.foo == 42 + assert retrieved.bar[0].one == "hi" + assert retrieved.bar[0].two == "mom" + assert retrieved.bar[1].one == "hello" + assert retrieved.bar[1].two == "dad" + + assert isinstance(retrieved.bar[0], OtherKind) + assert isinstance(retrieved.bar[1], OtherKind) + + +@pytest.mark.usefixtures("client_context") +def test_legacy_repeated_structured_property_w_expando( + ds_client, dispose_of, client_context +): + """Regression test for #669 + + https://github.com/googleapis/python-ndb/issues/669 + """ + + class OtherKind(ndb.Expando): + one = ndb.StringProperty() + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StructuredProperty(OtherKind, repeated=True) + + entity = SomeKind( + foo=42, + bar=[ + OtherKind(one="one-a"), + OtherKind(two="two-b"), + OtherKind(one="one-c", two="two-c"), + ], + ) + + with client_context.new(legacy_data=True).use(): + key = entity.put() + dispose_of(key._key) + + ds_entity = ds_client.get(key._key) + assert ds_entity["bar.one"] == ["one-a", None, "one-c"] + assert ds_entity["bar.two"] == [None, "two-b", "two-c"] + + retrieved = key.get() + assert retrieved.foo == 42 + assert retrieved.bar[0].one == "one-a" + assert not hasattr(retrieved.bar[0], "two") + assert retrieved.bar[1].one is None + assert retrieved.bar[1].two == "two-b" + assert retrieved.bar[2].one == "one-c" + assert retrieved.bar[2].two == "two-c" + + assert isinstance(retrieved.bar[0], OtherKind) + assert isinstance(retrieved.bar[1], OtherKind) + assert isinstance(retrieved.bar[2], OtherKind) + + +@pytest.mark.usefixtures("client_context") +def test_legacy_repeated_structured_property_w_expando_empty( + ds_client, dispose_of, client_context +): + """Regression test for #669 + + https://github.com/googleapis/python-ndb/issues/669 + """ + + class OtherKind(ndb.Expando): + one = ndb.StringProperty() + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StructuredProperty(OtherKind, repeated=True) + + entity = SomeKind(foo=42, bar=[]) + + with client_context.new(legacy_data=True).use(): + key = entity.put() + dispose_of(key._key) + + retrieved = key.get() + assert retrieved.foo == 42 + assert retrieved.bar == [] + + +@pytest.mark.usefixtures("client_context") +def test_insert_expando(dispose_of): + class SomeKind(ndb.Expando): + foo = ndb.IntegerProperty() + + entity = SomeKind(foo=42) + entity.expando_prop = "exp-value" + key = entity.put() + dispose_of(key._key) + + retrieved = key.get() + assert retrieved.foo == 42 + assert retrieved.expando_prop == "exp-value" + + +def test_insert_expando_w_legacy_structured_property(client_context, dispose_of): + """Regression test for issue #673 + + https://github.com/googleapis/python-ndb/issues/673 + """ + + class SomeKind(ndb.Expando): + foo = ndb.IntegerProperty() + + class OtherKind(ndb.Expando): + bar = ndb.StringProperty() + + with client_context.new(legacy_data=True).use(): + entity = SomeKind( + foo=42, + other=OtherKind( + bar="hi mom!", + other=OtherKind(bar="hello dad!"), + ), + ) + key = entity.put() + dispose_of(key._key) + + retrieved = key.get() + assert retrieved.foo == 42 + assert retrieved.other.bar == "hi mom!" + + # Note that the class for the subobject is lost. I tested with legacy NDB and + # this is true there as well. 
+ assert isinstance(retrieved.other, ndb.Expando) + assert not isinstance(retrieved.other, OtherKind) + + +def test_insert_expando_w_legacy_dynamic_dict(client_context, dispose_of): + """Regression test for issue #673 + + https://github.com/googleapis/python-ndb/issues/673 + """ + + class SomeKind(ndb.Expando): + foo = ndb.IntegerProperty() + + with client_context.new(legacy_data=True).use(): + dynamic_dict_value = {"k1": {"k2": {"k3": "v1"}}, "k4": "v2"} + entity = SomeKind(foo=42, dynamic_dict_prop=dynamic_dict_value) + key = entity.put() + dispose_of(key._key) + + retrieved = key.get() + assert retrieved.foo == 42 + assert retrieved.dynamic_dict_prop.k1.k2.k3 == "v1" + assert retrieved.dynamic_dict_prop.k4 == "v2" + + +@pytest.mark.usefixtures("client_context") +def test_insert_polymodel(dispose_of): + class Animal(ndb.PolyModel): + one = ndb.StringProperty() + + class Feline(Animal): + two = ndb.StringProperty() + + class Cat(Feline): + three = ndb.StringProperty() + + entity = Cat(one="hello", two="dad", three="i'm in jail") + key = entity.put() + dispose_of(key._key) + + retrieved = key.get() + + assert isinstance(retrieved, Animal) + assert isinstance(retrieved, Cat) + assert retrieved.one == "hello" + assert retrieved.two == "dad" + assert retrieved.three == "i'm in jail" + + +@pytest.mark.usefixtures("client_context") +def test_insert_autonow_property(dispose_of): + class SomeKind(ndb.Model): + foo = ndb.StringProperty() + created_at = ndb.DateTimeProperty(indexed=True, auto_now_add=True) + updated_at = ndb.DateTimeProperty(indexed=True, auto_now=True) + + entity = SomeKind(foo="bar") + key = entity.put() + dispose_of(key._key) + + retrieved = key.get() + + assert isinstance(retrieved.created_at, datetime.datetime) + assert isinstance(retrieved.updated_at, datetime.datetime) + + +@pytest.mark.usefixtures("client_context") +def test_insert_autonow_property_with_tz(dispose_of): + """Regression test for #517 + + https://github.com/googleapis/python-ndb/issues/517 + """ + + class SomeKind(ndb.Model): + created_at = ndb.DateTimeProperty(auto_now_add=True, tzinfo=pytz.utc) + updated_at = ndb.DateTimeProperty(auto_now=True, tzinfo=pytz.utc) + + now = datetime.datetime.now(pytz.utc) + entity = SomeKind() + key = entity.put() + dispose_of(key._key) + + _assert_contemporaneous(entity.created_at, now) + _assert_contemporaneous(entity.updated_at, now) + + retrieved = key.get() + + _assert_contemporaneous(retrieved.created_at, now) + _assert_contemporaneous(retrieved.updated_at, now) + + +@pytest.mark.usefixtures("client_context") +def test_insert_datetime_property_with_tz(dispose_of): + """Regression test for #517 + + https://github.com/googleapis/python-ndb/issues/517 + """ + + class SomeKind(ndb.Model): + alarm1 = ndb.DateTimeProperty(tzinfo=pytz.utc) + alarm2 = ndb.DateTimeProperty(tzinfo=pytz.utc) + + now = datetime.datetime.now(pytz.utc) + entity = SomeKind( + alarm1=now, + alarm2=datetime.datetime.utcnow(), # naive + ) + key = entity.put() + dispose_of(key._key) + + _assert_contemporaneous(entity.alarm1, now) + _assert_contemporaneous(entity.alarm2, now) + + retrieved = key.get() + + _assert_contemporaneous(retrieved.alarm1, now) + _assert_contemporaneous(retrieved.alarm2, now) + + +@pytest.mark.usefixtures("client_context") +def test_insert_nested_autonow_property(dispose_of): + class OtherKind(ndb.Model): + created_at = ndb.DateTimeProperty(indexed=True, auto_now_add=True) + updated_at = ndb.DateTimeProperty(indexed=True, auto_now=True) + + class SomeKind(ndb.Model): + other = 
ndb.StructuredProperty(OtherKind) + + entity = SomeKind(other=OtherKind()) + key = entity.put() + dispose_of(key._key) + + retrieved = key.get() + + assert isinstance(retrieved.other.created_at, datetime.datetime) + assert isinstance(retrieved.other.updated_at, datetime.datetime) + + +@pytest.mark.usefixtures("client_context") +def test_uninitialized_property(dispose_of): + class SomeKind(ndb.Model): + foo = ndb.StringProperty(required=True) + + entity = SomeKind() + + with pytest.raises(ndb.exceptions.BadValueError): + entity.put() + + +@mock.patch( + "google.cloud.ndb._datastore_api.make_call", + mock.Mock(side_effect=Exception("Datastore shouldn't get called.")), +) +def test_crud_without_datastore(ds_entity, client_context): + entity_id = test_utils.system.unique_resource_id() + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StringProperty() + baz = ndb.StringProperty() + + global_cache = global_cache_module._InProcessGlobalCache() + with client_context.new(global_cache=global_cache).use() as context: + context.set_global_cache_policy(None) # Use default + context.set_datastore_policy(False) # Don't use Datastore + + key = ndb.Key(KIND, entity_id) + SomeKind(foo=42, bar="none", baz="night", _key=key).put() + + entity = key.get() + assert isinstance(entity, SomeKind) + assert entity.foo == 42 + assert entity.bar == "none" + assert entity.baz == "night" + + key.delete() + assert key.get() is None + + +@pytest.mark.usefixtures("client_context") +def test_computed_key_property(dispose_of): + """Regression test for #284. + + https://github.com/googleapis/python-ndb/issues/284 + """ + + class AModel(ndb.Model): + s_foo = ndb.StringProperty() + + class BModel(ndb.Model): + s_bar = ndb.StringProperty() + key_a = ndb.KeyProperty(kind="AModel", indexed=True) + + class CModel(ndb.Model): + s_foobar = ndb.StringProperty() + key_b = ndb.KeyProperty(kind="BModel", indexed=True) + key_a = ndb.ComputedProperty( # Issue here + lambda self: self.key_b.get().key_a if self.key_b else None, + ) + + key_a = AModel(s_foo="test").put() + dispose_of(key_a._key) + key_b = BModel(s_bar="test", key_a=key_a).put() + dispose_of(key_b._key) + key_c = CModel(s_foobar="test", key_b=key_b).put() + dispose_of(key_c._key) + + entity = key_c.get() + assert entity.key_a == key_a + assert entity.key_b == key_b + + +@pytest.mark.usefixtures("client_context") +def test_user_property(dispose_of): + class SomeKind(ndb.Model): + user = ndb.UserProperty() + + user = ndb.User("somebody@example.com", "gmail.com") + entity = SomeKind(user=user) + key = entity.put() + dispose_of(key._key) + + retrieved = key.get() + assert retrieved.user.email() == "somebody@example.com" + assert retrieved.user.auth_domain() == "gmail.com" + + +@pytest.mark.usefixtures("client_context") +def test_user_property_different_user_class(dispose_of): + class SomeKind(ndb.Model): + user = ndb.UserProperty() + + class User(object): + def email(self): + return "somebody@example.com" + + def auth_domain(self): + return "gmail.com" + + def user_id(self): + return None + + entity = SomeKind(user=User()) + key = entity.put() + dispose_of(key._key) + + retrieved = key.get() + assert retrieved.user.email() == "somebody@example.com" + assert retrieved.user.auth_domain() == "gmail.com" + + +@pytest.mark.usefixtures("client_context") +def test_repeated_empty_strings(dispose_of): + """Regression test for issue # 300. 
+ + https://github.com/googleapis/python-ndb/issues/300 + """ + + class SomeKind(ndb.Model): + foo = ndb.StringProperty(repeated=True) + + entity = SomeKind(foo=["", ""]) + key = entity.put() + dispose_of(key._key) + + retrieved = key.get() + assert retrieved.foo == ["", ""] + + +@pytest.mark.skipif(not USE_REDIS_CACHE, reason="Redis is not configured") +@pytest.mark.usefixtures("redis_context") +def test_multi_get_weirdness_with_redis(dispose_of): + """Regression test for issue #294. + + https://github.com/googleapis/python-ndb/issues/294 + """ + + class SomeKind(ndb.Model): + foo = ndb.StringProperty() + + objects = [SomeKind(foo=str(i)) for i in range(10)] + keys = ndb.put_multi(objects) + for key in keys: + dispose_of(key._key) + ndb.get_multi(keys) + + one_object = random.choice(keys).get() + one_object.foo = "CHANGED" + one_object.put() + + objects_upd = ndb.get_multi(keys) + keys_upd = [obj.key for obj in objects_upd] + assert len(keys_upd) == len(keys) + assert len(set(keys_upd)) == len(set(keys)) + assert set(keys_upd) == set(keys) + + +@pytest.mark.usefixtures("client_context") +def test_multi_with_lots_of_keys(dispose_of): + """Regression test for issue #318. + + https://github.com/googleapis/python-ndb/issues/318 + """ + N = 1001 + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + foos = list(range(N)) + entities = [SomeKind(foo=foo) for foo in foos] + keys = ndb.put_multi(entities) + dispose_of(*(key._key for key in keys)) + assert len(keys) == N + + entities = ndb.get_multi(keys) + assert [entity.foo for entity in entities] == foos + + ndb.delete_multi(keys) + entities = ndb.get_multi(keys) + assert entities == [None] * N + + +@pytest.mark.usefixtures("client_context") +def test_allocate_a_lot_of_keys(): + N = 1001 + + class SomeKind(ndb.Model): + pass + + keys = SomeKind.allocate_ids(N) + assert len(keys) == N + + +@pytest.mark.usefixtures("client_context") +def test_delete_multi_with_transactional(dispose_of): + """Regression test for issue #271 + + https://github.com/googleapis/python-ndb/issues/271 + """ + N = 10 + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + @ndb.transactional() + def delete_them(entities): + ndb.delete_multi([entity.key for entity in entities]) + + foos = list(range(N)) + entities = [SomeKind(foo=foo) for foo in foos] + keys = ndb.put_multi(entities) + dispose_of(*(key._key for key in keys)) + + entities = ndb.get_multi(keys) + assert [entity.foo for entity in entities] == foos + + assert delete_them(entities) is None + entities = ndb.get_multi(keys) + assert entities == [None] * N + + +@pytest.mark.usefixtures("client_context") +def test_compressed_text_property(dispose_of, ds_client): + """Regression test for #277 + + https://github.com/googleapis/python-ndb/issues/277 + """ + + class SomeKind(ndb.Model): + foo = ndb.TextProperty(compressed=True) + + entity = SomeKind(foo="Compress this!") + key = entity.put() + dispose_of(key._key) + + retrieved = key.get() + assert retrieved.foo == "Compress this!" + + ds_entity = ds_client.get(key._key) + assert zlib.decompress(ds_entity["foo"]) == b"Compress this!" 
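+
+
+# Editor's sketch (illustrative, not part of the upstream suite):
+# test_multi_with_lots_of_keys above shows that ndb's *_multi helpers are
+# expected to handle 1000+ entities on their own. This hypothetical helper is
+# only the manual fallback for chunking deletes by hand; the batch size of
+# 500 mirrors the cleanup loop in conftest.py.
+def _sketch_delete_in_batches(keys, batch_size=500):
+    # Delete keys in fixed-size slices to stay under per-RPC mutation limits.
+    for start in range(0, len(keys), batch_size):
+        ndb.delete_multi(keys[start : start + batch_size])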
+ + +def test_insert_entity_with_repeated_local_structured_property_legacy_data( + client_context, dispose_of, ds_client +): + """Regression test for #326 + + https://github.com/googleapis/python-ndb/issues/326 + """ + + class OtherKind(ndb.Model): + one = ndb.StringProperty() + two = ndb.StringProperty() + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.LocalStructuredProperty(OtherKind, repeated=True) + + with client_context.new(legacy_data=True).use(): + entity = SomeKind( + foo=42, + bar=[ + OtherKind(one="hi", two="mom"), + OtherKind(one="and", two="dad"), + ], + ) + key = entity.put() + dispose_of(key._key) + + retrieved = key.get() + assert retrieved.foo == 42 + assert retrieved.bar[0].one == "hi" + assert retrieved.bar[0].two == "mom" + assert retrieved.bar[1].one == "and" + assert retrieved.bar[1].two == "dad" + + assert isinstance(retrieved.bar[0], OtherKind) + assert isinstance(retrieved.bar[1], OtherKind) + + +def test_insert_structured_property_with_unindexed_subproperty_legacy_data( + client_context, dispose_of, ds_client +): + """Regression test for #341 + + https://github.com/googleapis/python-ndb/issues/341 + """ + + class OtherKind(ndb.Model): + data = ndb.BlobProperty(indexed=False) + + class SomeKind(ndb.Model): + entry = ndb.StructuredProperty(OtherKind) + + with client_context.new(legacy_data=True).use(): + entity = SomeKind(entry=OtherKind(data=b"01234567890" * 1000)) + key = entity.put() + dispose_of(key._key) + + retrieved = key.get() + assert isinstance(retrieved.entry, OtherKind) + + +@pytest.mark.usefixtures("client_context") +def test_serialization(dispose_of): + """Regression test for #384 + + https://github.com/googleapis/python-ndb/issues/384 + """ + + # This is needed because pickle can't serialize local objects + global SomeKind, OtherKind + + class OtherKind(ndb.Model): + foo = ndb.IntegerProperty() + + @classmethod + def _get_kind(cls): + return "OtherKind" + + class SomeKind(ndb.Model): + other = ndb.StructuredProperty(OtherKind) + + @classmethod + def _get_kind(cls): + return "SomeKind" + + entity = SomeKind(other=OtherKind(foo=1, namespace="Test"), namespace="Test") + key = entity.put() + dispose_of(key._key) + + retrieved = key.get() + assert retrieved.other.key is None or retrieved.other.key.id() is None + entity = pickle.loads(pickle.dumps(retrieved)) + assert entity.other.foo == 1 + + +@pytest.mark.usefixtures("client_context") +def test_custom_validator(dispose_of, ds_client): + """New feature test for #252 + + https://github.com/googleapis/python-ndb/issues/252 + """ + + def date_validator(prop, value): + return datetime.datetime.strptime(value, "%Y-%m-%d %H:%M:%S") + + class SomeKind(ndb.Model): + foo = ndb.DateTimeProperty(validator=date_validator) + + entity = SomeKind(foo="2020-08-08 1:02:03") + key = entity.put() + dispose_of(key._key) + + retrieved = key.get() + assert retrieved.foo == datetime.datetime(2020, 8, 8, 1, 2, 3) + + +def test_cache_returns_entity_if_available(dispose_of, client_context): + """Regression test for #441 + + https://github.com/googleapis/python-ndb/issues/441 + """ + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StringProperty() + + client_context.set_cache_policy(None) # Use default + + somekind = SomeKind(foo=1) + key = somekind.put() + dispose_of(key._key) + + query = ndb.Query(kind="SomeKind") + ourkind = query.get() + ourkind.bar = "confusing" + + assert somekind.bar == "confusing" + + +def test_cache_off_new_entity_created(dispose_of, client_context): + 
"""Regression test for #441 + + https://github.com/googleapis/python-ndb/issues/441 + """ + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StringProperty() + + somekind = SomeKind(foo=1) + key = somekind.put() + dispose_of(key._key) + + query = ndb.Query(kind="SomeKind") + ourkind = query.get() + ourkind.bar = "confusing" + + assert somekind.bar is None + + +@pytest.mark.usefixtures("client_context") +def test_local_structured_property_with_polymodel(dispose_of): + """Regression test for #481 + + https://github.com/googleapis/python-ndb/issues/481 + """ + + class Base(ndb.PolyModel): + pass + + class SubKind(Base): + foo = ndb.StringProperty() + + class Container(ndb.Model): + child = ndb.LocalStructuredProperty(Base) + + entity = Container(child=SubKind(foo="bar")) + key = entity.put() + dispose_of(key._key) + + entity = entity.key.get() + assert entity.child.foo == "bar" + + +@pytest.mark.usefixtures("client_context") +def test_local_structured_property_with_inheritance(dispose_of): + """Regression test for #523 + + https://github.com/googleapis/python-ndb/issues/523 + """ + + class Base(ndb.Model): + pass + + class SubKind(Base): + foo = ndb.StringProperty() + + class Container(ndb.Model): + children = ndb.LocalStructuredProperty(Base, repeated=True) + + entity = Container() + + subkind = SubKind(foo="bar") + entity.children.append(subkind) + key = entity.put() + + dispose_of(key._key) + + entity = entity.key.get() + assert isinstance(entity.children[0], Base) + + +def test_structured_property_with_nested_compressed_json_property_using_legacy_format( + client_context, dispose_of +): + """Regression test for #602 + + https://github.com/googleapis/python-ndb/issues/602 + """ + + class OtherKind(ndb.Model): + data = ndb.JsonProperty(compressed=True) + + class SomeKind(ndb.Model): + sub_model = ndb.StructuredProperty(OtherKind) + + with client_context.new(legacy_data=True).use(): + model = SomeKind(sub_model=OtherKind(data={"test": 1})) + key = model.put() + dispose_of(key._key) + + assert key.get().sub_model.data["test"] == 1 + + +def test_put_updates_cache(client_context, dispose_of): + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + client_context.set_cache_policy(None) # Use default + + entity = SomeKind(foo=42) + key = entity.put() + assert len(client_context.cache) == 1 + dispose_of(key._key) + + +def test_put_with_use_cache_true_updates_cache(client_context, dispose_of): + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + client_context.set_cache_policy(None) # Use default + + entity = SomeKind(foo=42) + key = entity.put(use_cache=True) + assert len(client_context.cache) == 1 + assert client_context.cache[key] is entity + + dispose_of(key._key) + + +def test_put_with_use_cache_false_does_not_update_cache(client_context, dispose_of): + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + client_context.set_cache_policy(None) # Use default + + entity = SomeKind(foo=42) + key = entity.put(use_cache=False) + assert len(client_context.cache) == 0 + + dispose_of(key._key) diff --git a/packages/google-cloud-ndb/tests/system/test_metadata.py b/packages/google-cloud-ndb/tests/system/test_metadata.py new file mode 100644 index 000000000000..3d0eee610401 --- /dev/null +++ b/packages/google-cloud-ndb/tests/system/test_metadata.py @@ -0,0 +1,312 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+System tests for metadata.
+"""
+import pytest
+
+from importlib import reload
+
+from google.cloud import ndb
+
+from test_utils import retry
+
+
+_retry_assertion_errors = retry.RetryErrors(AssertionError)
+
+
+@pytest.mark.usefixtures("client_context")
+def test_kind_metadata(dispose_of, database_id):
+    # ndb.Model._kind_map is reset between parameterized test runs, which
+    # breaks kind lookups for the Kind metadata query below. Importing the
+    # metadata module primes the kind map, so force a reload here to trigger
+    # that priming again.
+    from google.cloud.ndb import metadata
+
+    reload(metadata)
+
+    class AnyKind(ndb.Model):
+        foo = ndb.IntegerProperty()
+
+    class MyKind(ndb.Model):
+        bar = ndb.StringProperty()
+
+    entity1 = AnyKind(foo=1, id="x", database=database_id, namespace="_test_namespace_")
+    entity1.put()
+    dispose_of(entity1.key._key)
+
+    entity2 = MyKind(
+        bar="x", id="x", database=database_id, namespace="_test_namespace_"
+    )
+    entity2.put()
+    dispose_of(entity2.key._key)
+
+    @_retry_assertion_errors
+    def query_metadata():
+        query = ndb.Query(
+            kind=ndb.metadata.Kind.KIND_NAME, namespace="_test_namespace_"
+        )  # database is implicit
+        results = query.fetch()
+        kinds = [result.kind_name for result in results]
+        assert all(kind in kinds for kind in ["AnyKind", "MyKind"])
+
+    query_metadata()
+
+
+@pytest.mark.usefixtures("client_context")
+def test_get_kinds(dispose_of):
+    from google.cloud.ndb.metadata import get_kinds
+
+    class AnyKind(ndb.Model):
+        foo = ndb.IntegerProperty()
+
+    class MyKind(ndb.Model):
+        bar = ndb.StringProperty()
+
+    class OtherKind(ndb.Model):
+        baz = ndb.IntegerProperty()
+
+    class SomeKind(ndb.Model):
+        qux = ndb.StringProperty()
+
+    entity1 = AnyKind(foo=1)
+    entity1.put()
+    dispose_of(entity1.key._key)
+
+    entity2 = MyKind(bar="a")
+    entity2.put()
+    dispose_of(entity2.key._key)
+
+    entity3 = OtherKind(baz=2)
+    entity3.put()
+    dispose_of(entity3.key._key)
+
+    entity4 = SomeKind(qux="a")
+    entity4.put()
+    dispose_of(entity4.key._key)
+
+    @_retry_assertion_errors
+    def query_metadata():
+        kinds = get_kinds()
+        assert all(
+            kind in kinds for kind in ["AnyKind", "MyKind", "OtherKind", "SomeKind"]
+        )
+
+        kinds = get_kinds(start="N")
+        assert all(kind in kinds for kind in ["OtherKind", "SomeKind"])
+        assert not any(kind in kinds for kind in ["AnyKind", "MyKind"])
+
+        kinds = get_kinds(end="N")
+        assert all(kind in kinds for kind in ["AnyKind", "MyKind"])
+        assert not any(kind in kinds for kind in ["OtherKind", "SomeKind"])
+
+        kinds = get_kinds(start="L", end="P")
+        assert all(kind in kinds for kind in ["MyKind", "OtherKind"])
+        assert not any(kind in kinds for kind in ["AnyKind", "SomeKind"])
+
+    query_metadata()
+
+
+@pytest.mark.usefixtures("client_context")
+def test_namespace_metadata(dispose_of):
+    from google.cloud.ndb.metadata import Namespace
+
+    # Why is this not necessary for Kind? 
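+    # (Editor's guess, hedged: the parameterized fixtures reset
+    # ndb.Model._kind_map between runs; test_kind_metadata re-primes every
+    # metadata class by reloading the module, while _fix_up_properties() here
+    # appears to re-register just this one class.)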
+ Namespace._fix_up_properties() + + class AnyKind(ndb.Model): + foo = ndb.IntegerProperty() + + entity1 = AnyKind(foo=1, namespace="_test_namespace_") + entity1.put() + dispose_of(entity1.key._key) + + entity2 = AnyKind(foo=2, namespace="_test_namespace_2_") + entity2.put() + dispose_of(entity2.key._key) + + @_retry_assertion_errors + def query_metadata(): + query = ndb.Query(kind=Namespace.KIND_NAME) + results = query.fetch() + + names = [result.namespace_name for result in results] + assert all(name in names for name in ["_test_namespace_", "_test_namespace_2_"]) + + query_metadata() + + +@pytest.mark.usefixtures("client_context") +def test_get_namespaces(dispose_of): + from google.cloud.ndb.metadata import get_namespaces + + class AnyKind(ndb.Model): + foo = ndb.IntegerProperty() + + entity1 = AnyKind(foo=1, namespace="CoolNamespace") + entity1.put() + dispose_of(entity1.key._key) + + entity2 = AnyKind(foo=2, namespace="MyNamespace") + entity2.put() + dispose_of(entity2.key._key) + + entity3 = AnyKind(foo=3, namespace="OtherNamespace") + entity3.put() + dispose_of(entity3.key._key) + + @_retry_assertion_errors + def query_metadata(): + names = get_namespaces() + assert all( + name in names for name in ["CoolNamespace", "MyNamespace", "OtherNamespace"] + ) + + names = get_namespaces(start="L") + assert all(name in names for name in ["MyNamespace", "OtherNamespace"]) + + names = get_namespaces(end="N") + assert all(name in names for name in ["CoolNamespace", "MyNamespace"]) + + names = get_namespaces(start="D", end="N") + assert all(name in names for name in ["MyNamespace"]) + + query_metadata() + + +@pytest.mark.usefixtures("client_context") +def test_property_metadata(dispose_of): + from google.cloud.ndb.metadata import Property + + # Why is this not necessary for Kind?
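+ # (Presumably the same workaround as for Namespace in + # test_namespace_metadata above.)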
+ Property._fix_up_properties() + + class AnyKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StringProperty() + + entity1 = AnyKind(foo=1, bar="x") + entity1.put() + dispose_of(entity1.key._key) + + @_retry_assertion_errors + def query_metadata(): + query = ndb.Query(kind=Property.KIND_NAME) + results = query.fetch() + + properties = [ + result.property_name for result in results if result.kind_name == "AnyKind" + ] + assert properties == ["bar", "foo"] + + query_metadata() + + +@pytest.mark.usefixtures("client_context") +def test_get_properties_of_kind(dispose_of): + from google.cloud.ndb.metadata import get_properties_of_kind + + class AnyKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StringProperty() + baz = ndb.IntegerProperty() + qux = ndb.StringProperty() + + entity1 = AnyKind(foo=1, bar="x", baz=3, qux="y") + entity1.put() + dispose_of(entity1.key._key) + + @_retry_assertion_errors + def query_metadata(): + properties = get_properties_of_kind("AnyKind") + assert properties == ["bar", "baz", "foo", "qux"] + + properties = get_properties_of_kind("AnyKind", start="c") + assert properties == ["foo", "qux"] + + properties = get_properties_of_kind("AnyKind", end="e") + assert properties == ["bar", "baz"] + + properties = get_properties_of_kind("AnyKind", start="c", end="p") + assert properties == ["foo"] + + query_metadata() + + +@pytest.mark.usefixtures("client_context") +@pytest.mark.parametrize("namespace", ["DiffNamespace"]) +def test_get_properties_of_kind_different_namespace(dispose_of, namespace): + from google.cloud.ndb.metadata import get_properties_of_kind + + class AnyKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StringProperty() + baz = ndb.IntegerProperty() + qux = ndb.StringProperty() + + entity1 = AnyKind(foo=1, bar="x", baz=3, qux="y", namespace="DiffNamespace") + entity1.put() + dispose_of(entity1.key._key) + + @_retry_assertion_errors + def query_metadata(): + properties = get_properties_of_kind("AnyKind") + assert properties == ["bar", "baz", "foo", "qux"] + + properties = get_properties_of_kind("AnyKind", start="c") + assert properties == ["foo", "qux"] + + properties = get_properties_of_kind("AnyKind", end="e") + assert properties == ["bar", "baz"] + + properties = get_properties_of_kind("AnyKind", start="c", end="p") + assert properties == ["foo"] + + query_metadata() + + +@pytest.mark.usefixtures("client_context") +def test_get_representations_of_kind(dispose_of): + from google.cloud.ndb.metadata import get_representations_of_kind + + class AnyKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StringProperty() + baz = ndb.IntegerProperty() + qux = ndb.StringProperty() + + entity1 = AnyKind(foo=1, bar="x", baz=3, qux="y") + entity1.put() + dispose_of(entity1.key._key) + + @_retry_assertion_errors + def query_metadata(): + representations = get_representations_of_kind("AnyKind") + assert representations == { + "bar": ["STRING"], + "baz": ["INT64"], + "foo": ["INT64"], + "qux": ["STRING"], + } + + representations = get_representations_of_kind("AnyKind", start="c") + assert representations == {"foo": ["INT64"], "qux": ["STRING"]} + + representations = get_representations_of_kind("AnyKind", end="e") + assert representations == {"bar": ["STRING"], "baz": ["INT64"]} + + representations = get_representations_of_kind("AnyKind", start="c", end="p") + assert representations == {"foo": ["INT64"]} + + query_metadata() diff --git a/packages/google-cloud-ndb/tests/system/test_misc.py b/packages/google-cloud-ndb/tests/system/test_misc.py 
new file mode 100644 index 000000000000..3cb2e3d5e500 --- /dev/null +++ b/packages/google-cloud-ndb/tests/system/test_misc.py @@ -0,0 +1,524 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Difficult to classify regression tests. +""" +import os +import pickle +import threading +import time +import traceback + +import redis + +from unittest import mock + +import pytest + +import test_utils.system + +from google.api_core import exceptions as core_exceptions +from google.cloud import ndb + +from . import eventually, length_equals, KIND + +USE_REDIS_CACHE = bool(os.environ.get("REDIS_CACHE_URL")) + + +# Pickle can only pickle/unpickle global classes +class PickleOtherKind(ndb.Model): + foo = ndb.IntegerProperty() + + @classmethod + def _get_kind(cls): + return "OtherKind" + + +class PickleSomeKind(ndb.Model): + other = ndb.StructuredProperty(PickleOtherKind) + + @classmethod + def _get_kind(cls): + return "SomeKind" + + +@pytest.mark.usefixtures("client_context") +def test_pickle_roundtrip_structured_property(dispose_of): + """Regression test for Issue #281. + + https://github.com/googleapis/python-ndb/issues/281 + """ + ndb.Model._kind_map["SomeKind"] = PickleSomeKind + ndb.Model._kind_map["OtherKind"] = PickleOtherKind + + entity = PickleSomeKind(other=PickleOtherKind(foo=1)) + key = entity.put() + dispose_of(key._key) + + entity = key.get(use_cache=False) + assert entity.other.key is None or entity.other.key.id() is None + entity = pickle.loads(pickle.dumps(entity)) + assert entity.other.foo == 1 + + +@pytest.mark.usefixtures("client_context") +def test_tasklet_yield_empty_list(): + """Regression test for Issue #353. + + https://github.com/googleapis/python-ndb/issues/353 + """ + + @ndb.tasklet + def test_it(): + nothing = yield [] + raise ndb.Return(nothing) + + assert test_it().result() == () + + +@pytest.mark.usefixtures("client_context") +def test_transactional_composable(dispose_of): + """Regression test for Issue #366.
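+ + Calling one @ndb.transactional function from inside another should + compose: the inner call joins the caller's transaction rather than + failing or opening a second one.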
+ + https://github.com/googleapis/python-ndb/issues/366 + """ + + class OtherKind(ndb.Model): + bar = ndb.IntegerProperty() + + class SomeKind(ndb.Model): + foos = ndb.KeyProperty(repeated=True) + bar = ndb.IntegerProperty(default=42) + + others = [OtherKind(bar=bar) for bar in range(5)] + other_keys = ndb.put_multi(others) + for key in other_keys: + dispose_of(key._key) + + entity = SomeKind(foos=other_keys[1:]) + entity_key = entity.put() + dispose_of(entity_key._key) + + @ndb.transactional() + def get_entities(*keys): + entities = [] + for entity in ndb.get_multi(keys): + entities.append(entity) + if isinstance(entity, SomeKind): + entities.extend(get_foos(entity)) + + return entities + + @ndb.transactional() + def get_foos(entity): + return ndb.get_multi(entity.foos) + + results = get_entities(entity_key, other_keys[0]) + assert [result.bar for result in results] == [42, 1, 2, 3, 4, 0] + + +@pytest.mark.usefixtures("client_context") +def test_parallel_transactions(dispose_of): + """Regression test for Issue #394 + + https://github.com/googleapis/python-ndb/issues/394 + """ + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + @ndb.transactional_tasklet() + def update(id, add, delay=0): + entity = yield SomeKind.get_by_id_async(id) + foo = entity.foo + foo += add + + yield ndb.sleep(delay) + entity.foo = foo + + yield entity.put_async() + + @ndb.tasklet + def concurrent_tasks(id): + yield [ + update(id, 100), + update(id, 100, 0.01), + ] + + key = SomeKind(foo=42).put() + dispose_of(key._key) + id = key.id() + + concurrent_tasks(id).get_result() + + entity = SomeKind.get_by_id(id) + assert entity.foo == 242 + + +def test_parallel_transactions_w_context_cache(client_context, dispose_of): + """Regression test for Issue #394 + + https://github.com/googleapis/python-ndb/issues/394 + """ + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + @ndb.transactional_tasklet() + def update(id, add, delay=0): + entity = yield SomeKind.get_by_id_async(id) + foo = entity.foo + foo += add + + yield ndb.sleep(delay) + entity.foo = foo + + yield entity.put_async() + + @ndb.tasklet + def concurrent_tasks(id): + yield [ + update(id, 100), + update(id, 100, 0.01), + ] + + with client_context.new(cache_policy=None).use(): + key = SomeKind(foo=42).put() + dispose_of(key._key) + id = key.id() + + concurrent_tasks(id).get_result() + + entity = SomeKind.get_by_id(id) + assert entity.foo == 242 + + +@pytest.mark.skipif(not USE_REDIS_CACHE, reason="Redis is not configured") +@pytest.mark.usefixtures("redis_context") +def test_parallel_transactions_w_redis_cache(dispose_of): + """Regression test for Issue #394 + + https://github.com/googleapis/python-ndb/issues/394 + """ + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + @ndb.transactional_tasklet() + def update(id, add, delay=0): + entity = yield SomeKind.get_by_id_async(id) + foo = entity.foo + foo += add + + yield ndb.sleep(delay) + entity.foo = foo + + yield entity.put_async() + + @ndb.tasklet + def concurrent_tasks(id): + yield [ + update(id, 100), + update(id, 100, 0.01), + ] + + key = SomeKind(foo=42).put() + dispose_of(key._key) + id = key.id() + + SomeKind.get_by_id(id) + concurrent_tasks(id).get_result() + + entity = SomeKind.get_by_id(id) + assert entity.foo == 242 + + +def test_rollback_with_context_cache(client_context, dispose_of): + """Regression test for Issue #398 + + https://github.com/googleapis/python-ndb/issues/398 + """ + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + class 
SpuriousError(Exception): + pass + + @ndb.transactional() + def update(id, add, fail=False): + entity = SomeKind.get_by_id(id) + entity.foo = entity.foo + add + entity.put() + + if fail: + raise SpuriousError() + + with client_context.new(cache_policy=None).use(): + key = SomeKind(foo=42).put() + dispose_of(key._key) + id = key.id() + + update(id, 100) + + entity = SomeKind.get_by_id(id) + assert entity.foo == 142 + + try: + update(id, 100, fail=True) + except SpuriousError: + pass + + entity = SomeKind.get_by_id(id) + assert entity.foo == 142 + + +@pytest.mark.usefixtures("client_context") +def test_insert_entity_in_transaction_without_preallocating_id(dispose_of): + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StringProperty() + + def save_entity(): + # By not waiting on the Future, we don't force a call to AllocateIds + # before the transaction is committed. + SomeKind(foo=42, bar="none").put_async() + + ndb.transaction(save_entity) + + query = SomeKind.query() + eventually(query.fetch, length_equals(1)) + retrieved = query.fetch()[0] + dispose_of(retrieved._key._key) + + assert retrieved.foo == 42 + assert retrieved.bar == "none" + + +@pytest.mark.usefixtures("client_context") +def test_crosswired_property_names(ds_entity): + """Regression test for #461. + + https://github.com/googleapis/python-ndb/issues/461 + """ + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=42, bar=43) + + class SomeKind(ndb.Model): + bar = ndb.IntegerProperty(name="foo") + + key = ndb.Key(KIND, entity_id) + entity = key.get() + + assert entity.bar == 42 + + +@mock.patch("google.cloud.ndb._datastore_api.begin_transaction") +def test_do_not_disclose_cache_contents(begin_transaction, client_context): + """Regression test for #482. 
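+ + When beginning the transaction fails, the exception that surfaces (and + its formatted traceback) must not leak the contents of the context cache, + which can hold sensitive entity data.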
+ + https://github.com/googleapis/python-ndb/issues/482 + """ + begin_transaction.side_effect = core_exceptions.ServiceUnavailable("Spurious Error") + + client_context.cache["hello dad"] = "i'm in jail" + + @ndb.transactional() + def callback(): + pass + + with pytest.raises(Exception) as error_info: + callback() + + error = error_info.value + message = "".join(traceback.format_exception_only(type(error), error)) + assert "hello dad" not in message + + +@pytest.mark.skipif(not USE_REDIS_CACHE, reason="Redis is not configured") +@pytest.mark.usefixtures("client_context") +def test_parallel_threads_lookup_w_redis_cache(database_id, namespace, dispose_of): + """Regression test for #496 + + https://github.com/googleapis/python-ndb/issues/496 + """ + + class MonkeyPipeline(redis.client.Pipeline): + def mset(self, mapping): + """Force a delay here to expose concurrency error.""" + time.sleep(0.05) + return super(MonkeyPipeline, self).mset(mapping) + + with mock.patch("redis.client.Pipeline", MonkeyPipeline): + client = ndb.Client(database=database_id) + global_cache = ndb.RedisCache.from_environment() + activity = {"calls": 0} + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + class LookupThread(threading.Thread): + def __init__(self, id): + super(LookupThread, self).__init__() + self.id = id + + def run(self): + context = client.context( + cache_policy=False, + global_cache=global_cache, + namespace=namespace, + ) + with context: + entity = SomeKind.get_by_id(self.id) + assert entity.foo == 42 + activity["calls"] += 1 + + key = SomeKind(foo=42).put() + dispose_of(key._key) + id = key.id() + + thread1, thread2 = LookupThread(id), LookupThread(id) + thread1.start() + thread2.start() + thread1.join() + thread2.join() + + assert activity["calls"] == 2 + + +@pytest.mark.usefixtures("client_context") +def test_non_transactional_means_no_transaction(dispose_of): + """Regression test for #552 + + https://github.com/googleapis/python-ndb/issues/552 + """ + N = 50 + + class SomeKind(ndb.Model): + pass + + class OtherKind(ndb.Model): + pass + + @ndb.tasklet + def create_entities(): + parent_keys = yield [SomeKind().put_async() for _ in range(N)] + + futures = [] + for parent_key in parent_keys: + dispose_of(parent_key._key) + futures.append(OtherKind(parent=parent_key).put_async()) + futures.append(OtherKind(parent=parent_key).put_async()) + + keys = yield futures + for key in keys: + dispose_of(key._key) + + raise ndb.Return(keys) + + @ndb.non_transactional() + @ndb.tasklet + def non_transactional_tasklet(keys): + entities = yield ndb.get_multi_async(keys) + raise ndb.Return(entities) + + @ndb.non_transactional() + @ndb.tasklet + def also_a_non_transactional_tasklet(): + entities = yield OtherKind.query().fetch_async() + raise ndb.Return(entities) + + @ndb.transactional() + def test_lookup(keys): + entities = non_transactional_tasklet(keys).result() + assert len(entities) == N * 2 + + @ndb.transactional() + def test_query(): + return also_a_non_transactional_tasklet().result() + + keys = create_entities().result() + test_lookup(keys) + eventually(test_query, length_equals(N * 2)) + + +@pytest.mark.usefixtures("client_context") +def test_legacy_local_structured_property_with_boolean(ds_entity): + """Regression test for #623, #625 + + https://github.com/googleapis/python-ndb/issues/623 + https://github.com/googleapis/python-ndb/issues/625 + """ + children = [ + b"x\x9c\xab\xe2\x96bNJ,R`\xd0b\x12`\xac\x12\xe1\xe0\x97bN\xcb\xcf\x07r9\xa5" + b"\xd832\x15r\xf3s\x15\x01u_\x07\n", + 
b"x\x9c\xab\xe2\x96bNJ,R`\xd0b\x12`\xa8\x12\xe7\xe0\x97bN\xcb\xcf\x07ry\xa4" + b"\xb82Rsr\xf2\x15R\x12S\x14\x01\x8e\xbf\x085", + ] + + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, children=children) + + class OtherKind(ndb.Model): + foo = ndb.StringProperty() + bar = ndb.BooleanProperty(required=True, default=True) + + class SomeKind(ndb.Model): + children = ndb.LocalStructuredProperty( + OtherKind, repeated=True, compressed=True + ) + + entity = SomeKind.get_by_id(entity_id) + + assert len(entity.children) == 2 + assert entity.children[0].foo == "hi mom!" + assert entity.children[0].bar is True + assert entity.children[1].foo == "hello dad!" + assert entity.children[1].bar is False + + entity.children.append(OtherKind(foo="i'm in jail!", bar=False)) + entity.put() + + entity = SomeKind.get_by_id(entity_id) + assert entity.children[0].foo == "hi mom!" + assert entity.children[0].bar is True + assert entity.children[1].foo == "hello dad!" + assert entity.children[1].bar is False + assert entity.children[2].foo == "i'm in jail!" + assert entity.children[2].bar is False + + +@pytest.mark.usefixtures("client_context") +def test_parent_and_child_in_default_namespace(dispose_of): + """Regression test for #661 + + https://github.com/googleapis/python-ndb/issues/661 + """ + + class SomeKind(ndb.Model): + pass + + class OtherKind(ndb.Model): + foo = ndb.IntegerProperty() + + parent = SomeKind(namespace="") + parent_key = parent.put() + dispose_of(parent_key._key) + + child = OtherKind(parent=parent_key, namespace="", foo=42) + child_key = child.put() + dispose_of(child_key._key) + + assert OtherKind.query(ancestor=parent_key).get().foo == 42 diff --git a/packages/google-cloud-ndb/tests/system/test_query.py b/packages/google-cloud-ndb/tests/system/test_query.py new file mode 100644 index 000000000000..8e40acb3c0e4 --- /dev/null +++ b/packages/google-cloud-ndb/tests/system/test_query.py @@ -0,0 +1,2131 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +System tests for queries. +""" + +import datetime +import functools +import operator +import uuid + +import pytest +import pytz + +import test_utils.system + +from google.api_core import exceptions as core_exceptions +from google.cloud import ndb +from google.cloud.datastore import key as ds_key_module + +from . 
import KIND, eventually, equals, length_equals + + +@pytest.mark.usefixtures("client_context") +def test_fetch_all_of_a_kind(ds_entity): + for i in range(5): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=i) + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + query = SomeKind.query() + results = eventually(query.fetch, length_equals(5)) + + results = sorted(results, key=operator.attrgetter("foo")) + assert [entity.foo for entity in results] == [0, 1, 2, 3, 4] + + +@pytest.mark.usefixtures("client_context") +def test_fetch_w_absurdly_short_timeout(ds_entity): + for i in range(5): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=i) + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + query = SomeKind.query() + timeout = 1e-9 # One nanosecond + with pytest.raises(Exception) as error_context: + query.fetch(timeout=timeout) + + assert isinstance(error_context.value, core_exceptions.DeadlineExceeded) + + +@pytest.mark.usefixtures("client_context") +def test_fetch_lots_of_a_kind(dispose_of): + n_entities = 500 + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + @ndb.toplevel + def make_entities(): + entities = [SomeKind(foo=i) for i in range(n_entities)] + keys = yield [entity.put_async() for entity in entities] + raise ndb.Return(keys) + + for key in make_entities(): + dispose_of(key._key) + + query = SomeKind.query() + results = eventually(query.fetch, length_equals(n_entities)) + + results = sorted(results, key=operator.attrgetter("foo")) + assert [entity.foo for entity in results][:5] == [0, 1, 2, 3, 4] + + +@pytest.mark.usefixtures("client_context") +def test_high_limit(dispose_of): + """Regression test for Issue #236 + + https://github.com/googleapis/python-ndb/issues/236 + """ + n_entities = 500 + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + @ndb.toplevel + def make_entities(): + entities = [SomeKind(foo=i) for i in range(n_entities)] + keys = yield [entity.put_async() for entity in entities] + raise ndb.Return(keys) + + for key in make_entities(): + dispose_of(key._key) + + query = SomeKind.query() + eventually(query.fetch, length_equals(n_entities)) + results = query.fetch(limit=400) + + assert len(results) == 400 + + +@pytest.mark.usefixtures("client_context") +def test_fetch_and_immediately_cancel(dispose_of): + # Make a lot of entities so the query call won't complete before we get to + # call cancel.
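+ # (Even so, the test is timing-dependent: if the fetch were to complete + # before cancel() is processed, future.result() would simply return and + # the Cancelled assertion below would fail; the large entity count just + # makes that window very unlikely.)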
+ n_entities = 500 + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + @ndb.toplevel + def make_entities(): + entities = [SomeKind(foo=i) for i in range(n_entities)] + keys = yield [entity.put_async() for entity in entities] + raise ndb.Return(keys) + + for key in make_entities(): + dispose_of(key._key) + + query = SomeKind.query() + future = query.fetch_async() + future.cancel() + with pytest.raises(ndb.exceptions.Cancelled): + future.result() + + +@pytest.mark.usefixtures("client_context") +def test_ancestor_query(ds_entity): + root_id = test_utils.system.unique_resource_id() + ds_entity(KIND, root_id, foo=-1) + for i in range(5): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, root_id, KIND, entity_id, foo=i) + + another_id = test_utils.system.unique_resource_id() + ds_entity(KIND, another_id, foo=42) + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + query = SomeKind.query(ancestor=ndb.Key(KIND, root_id)) + results = query.fetch() + + results = sorted(results, key=operator.attrgetter("foo")) + assert [entity.foo for entity in results] == [-1, 0, 1, 2, 3, 4] + + +def test_ancestor_query_with_namespace(client_context, dispose_of, other_namespace): + class Dummy(ndb.Model): + foo = ndb.StringProperty(default="") + + entity1 = Dummy(foo="bar", namespace="xyz") + parent_key = entity1.put() + dispose_of(entity1.key._key) + + entity2 = Dummy(foo="child", parent=parent_key, namespace=None) + entity2.put() + dispose_of(entity2.key._key) + + entity3 = Dummy(foo="childless", namespace="xyz") + entity3.put() + dispose_of(entity3.key._key) + + with client_context.new(namespace=other_namespace).use(): + query = Dummy.query(ancestor=parent_key, namespace="xyz") + results = query.fetch() + + assert results[0].foo == "bar" + assert results[1].foo == "child" + + +def test_ancestor_query_with_default_namespace( + client_context, dispose_of, other_namespace +): + class Dummy(ndb.Model): + foo = ndb.StringProperty(default="") + + entity1 = Dummy(foo="bar", namespace="") + parent_key = entity1.put() + dispose_of(entity1.key._key) + + entity2 = Dummy(foo="child", parent=parent_key) + entity2.put() + dispose_of(entity2.key._key) + + entity3 = Dummy(foo="childless", namespace="") + entity3.put() + dispose_of(entity3.key._key) + + with client_context.new(namespace=other_namespace).use(): + query = Dummy.query(ancestor=parent_key, namespace="") + results = query.fetch() + + assert results[0].foo == "bar" + assert results[1].foo == "child" + + +@pytest.mark.usefixtures("client_context") +def test_projection(ds_entity): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=12, bar="none") + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=21, bar="naan") + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StringProperty() + + query = SomeKind.query(projection=("foo",)) + results = eventually(query.fetch, length_equals(2)) + + results = sorted(results, key=operator.attrgetter("foo")) + + assert results[0].foo == 12 + with pytest.raises(ndb.UnprojectedPropertyError): + results[0].bar + + assert results[1].foo == 21 + with pytest.raises(ndb.UnprojectedPropertyError): + results[1].bar + + +@pytest.mark.usefixtures("client_context") +def test_projection_datetime(ds_entity): + """Regression test for Issue #261 + + https://github.com/googleapis/python-ndb/issues/261 + """ + entity_id = test_utils.system.unique_resource_id() + ds_entity( + KIND, + entity_id, + 
foo=datetime.datetime(2010, 5, 12, 2, 42, tzinfo=pytz.UTC), + ) + entity_id = test_utils.system.unique_resource_id() + ds_entity( + KIND, + entity_id, + foo=datetime.datetime(2010, 5, 12, 2, 43, tzinfo=pytz.UTC), + ) + + class SomeKind(ndb.Model): + foo = ndb.DateTimeProperty() + bar = ndb.StringProperty() + + query = SomeKind.query(projection=("foo",)) + results = eventually(query.fetch, length_equals(2)) + + results = sorted(results, key=operator.attrgetter("foo")) + + assert results[0].foo == datetime.datetime(2010, 5, 12, 2, 42) + assert results[1].foo == datetime.datetime(2010, 5, 12, 2, 43) + + +@pytest.mark.usefixtures("client_context") +def test_projection_with_fetch_and_property(ds_entity): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=12, bar="none") + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=21, bar="naan") + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StringProperty() + + query = SomeKind.query() + eventually(query.fetch, length_equals(2)) + + results = query.fetch(projection=(SomeKind.foo,)) + results = sorted(results, key=operator.attrgetter("foo")) + + assert results[0].foo == 12 + with pytest.raises(ndb.UnprojectedPropertyError): + results[0].bar + + assert results[1].foo == 21 + with pytest.raises(ndb.UnprojectedPropertyError): + results[1].bar + + +@pytest.mark.usefixtures("client_context") +def test_distinct_on(ds_entity): + for i in range(6): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=i % 2, bar="none") + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StringProperty() + + query = SomeKind.query(distinct_on=("foo",)) + eventually(SomeKind.query().fetch, length_equals(6)) + + results = query.fetch() + results = sorted(results, key=operator.attrgetter("foo")) + + assert results[0].foo == 0 + assert results[0].bar == "none" + + assert results[1].foo == 1 + assert results[1].bar == "none" + + +@pytest.mark.usefixtures("client_context") +def test_namespace(dispose_of, other_namespace): + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StringProperty() + + entity1 = SomeKind(foo=1, bar="a", id="x", namespace=other_namespace) + entity1.put() + dispose_of(entity1.key._key) + + entity2 = SomeKind(foo=2, bar="b", id="x") + entity2.put() + dispose_of(entity2.key._key) + + eventually(SomeKind.query().fetch, length_equals(1)) + + query = SomeKind.query(namespace=other_namespace) + results = eventually(query.fetch, length_equals(1)) + + assert results[0].foo == 1 + assert results[0].bar == "a" + assert results[0].key.namespace() == other_namespace + + +def test_namespace_set_on_client_with_id(dispose_of, database_id, other_namespace): + """Regression test for #337 + + https://github.com/googleapis/python-ndb/issues/337 + """ + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StringProperty() + + client = ndb.Client(namespace=other_namespace, database=database_id) + with client.context(cache_policy=False): + id = test_utils.system.unique_resource_id() + entity1 = SomeKind(id=id, foo=1, bar="a") + key = entity1.put() + dispose_of(key._key) + assert key.namespace() == other_namespace + + results = eventually(SomeKind.query().fetch, length_equals(1)) + + assert results[0].foo == 1 + assert results[0].bar == "a" + assert results[0].key.namespace() == other_namespace + + +def test_query_default_namespace_when_context_namespace_is_other( + client_context, dispose_of, 
other_namespace +): + """Regression test for #476. + + https://github.com/googleapis/python-ndb/issues/476 + """ + unique_id = str(uuid.uuid4()) + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StringProperty() + discriminator = ndb.StringProperty(default=unique_id) + + entity1 = SomeKind(foo=1, bar="a", id="x", namespace=other_namespace) + entity1.put() + dispose_of(entity1.key._key) + + entity2 = SomeKind(foo=2, bar="b", id="x", namespace="") + entity2.put() + dispose_of(entity2.key._key) + + eventually(SomeKind.query(namespace=other_namespace).fetch, length_equals(1)) + + with client_context.new(namespace=other_namespace).use(): + query = SomeKind.query(namespace="").filter(SomeKind.discriminator == unique_id) + results = eventually(query.fetch, length_equals(1)) + + assert results[0].foo == 2 + assert results[0].bar == "b" + assert results[0].key.namespace() is None + + +@pytest.mark.usefixtures("client_context") +def test_filter_equal(ds_entity): + for i in range(5): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=i) + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + eventually(SomeKind.query().fetch, length_equals(5)) + + query = SomeKind.query(SomeKind.foo == 2) + results = query.fetch() + assert results[0].foo == 2 + + +@pytest.mark.usefixtures("client_context") +def test_filter_not_equal(ds_entity): + for i in range(5): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=i) + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + eventually(SomeKind.query().fetch, length_equals(5)) + + query = SomeKind.query(SomeKind.foo != 2) + results = query.fetch() + results = sorted(results, key=operator.attrgetter("foo")) + assert [entity.foo for entity in results] == [0, 1, 3, 4] + + +@pytest.mark.usefixtures("client_context") +def test_filter_or(dispose_of): + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StringProperty() + + @ndb.toplevel + def make_entities(): + keys = yield ( + SomeKind(foo=1, bar="a").put_async(), + SomeKind(foo=2, bar="b").put_async(), + SomeKind(foo=1, bar="c").put_async(), + ) + for key in keys: + dispose_of(key._key) + + make_entities() + eventually(SomeKind.query().fetch, length_equals(3)) + + query = SomeKind.query(ndb.OR(SomeKind.foo == 1, SomeKind.bar == "c")) + results = query.fetch() + results = sorted(results, key=operator.attrgetter("bar")) + assert [entity.bar for entity in results] == ["a", "c"] + + +@pytest.mark.usefixtures("client_context") +def test_order_by_ascending(ds_entity): + for i in reversed(range(5)): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=i) + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + query = SomeKind.query().order(SomeKind.foo) + results = eventually(query.fetch, length_equals(5)) + + assert [entity.foo for entity in results] == [0, 1, 2, 3, 4] + + +@pytest.mark.usefixtures("client_context") +def test_order_by_descending(ds_entity): + for i in range(5): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=i) + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + query = SomeKind.query().order(-SomeKind.foo) + results = eventually(query.fetch, length_equals(5)) + assert len(results) == 5 + + assert [entity.foo for entity in results] == [4, 3, 2, 1, 0] + + +@pytest.mark.usefixtures("client_context") +def test_order_by_with_or_filter(dispose_of):
+ """ + Checking to make sure ordering is preserved when merging different + result sets. + """ + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StringProperty() + + @ndb.toplevel + def make_entities(): + keys = yield ( + SomeKind(foo=0, bar="a").put_async(), + SomeKind(foo=1, bar="b").put_async(), + SomeKind(foo=2, bar="a").put_async(), + SomeKind(foo=3, bar="b").put_async(), + ) + for key in keys: + dispose_of(key._key) + + make_entities() + query = SomeKind.query(ndb.OR(SomeKind.bar == "a", SomeKind.bar == "b")) + query = query.order(SomeKind.foo) + results = eventually(query.fetch, length_equals(4)) + + assert [entity.foo for entity in results] == [0, 1, 2, 3] + + +@pytest.mark.usefixtures("client_context") +def test_keys_only(ds_entity): + # Assuming unique resource ids are assigned in order ascending with time. + # Seems to be true so far. + entity_id1 = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id1, foo=12, bar="none") + entity_id2 = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id2, foo=21, bar="naan") + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StringProperty() + + query = SomeKind.query().order(SomeKind.key) + results = eventually(lambda: query.fetch(keys_only=True), length_equals(2)) + + assert results[0] == ndb.Key("SomeKind", entity_id1) + assert results[1] == ndb.Key("SomeKind", entity_id2) + + +@pytest.mark.usefixtures("client_context") +def test_offset_and_limit(ds_entity): + for i in range(5): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=i) + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + eventually(SomeKind.query().fetch, length_equals(5)) + + query = SomeKind.query(order_by=["foo"]) + results = query.fetch(offset=2, limit=2) + assert [entity.foo for entity in results] == [2, 3] + + +@pytest.mark.usefixtures("client_context") +def test_offset_and_limit_with_or_filter(dispose_of): + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StringProperty() + + @ndb.toplevel + def make_entities(): + keys = yield ( + SomeKind(foo=0, bar="a").put_async(), + SomeKind(foo=1, bar="b").put_async(), + SomeKind(foo=2, bar="a").put_async(), + SomeKind(foo=3, bar="b").put_async(), + SomeKind(foo=4, bar="a").put_async(), + SomeKind(foo=5, bar="b").put_async(), + ) + for key in keys: + dispose_of(key._key) + + make_entities() + eventually(SomeKind.query().fetch, length_equals(6)) + + query = SomeKind.query(ndb.OR(SomeKind.bar == "a", SomeKind.bar == "b")) + query = query.order(SomeKind.foo) + results = query.fetch(offset=1, limit=2) + + assert [entity.foo for entity in results] == [1, 2] + + +@pytest.mark.usefixtures("client_context") +def test_iter_all_of_a_kind(ds_entity): + for i in range(5): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=i) + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + query = SomeKind.query().order("foo") + results = eventually(lambda: list(query), length_equals(5)) + assert [entity.foo for entity in results] == [0, 1, 2, 3, 4] + + +@pytest.mark.usefixtures("client_context") +def test_get_first(ds_entity): + for i in range(5): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=i) + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + query = SomeKind.query().order(SomeKind.foo) + eventually(query.fetch, length_equals(5)) + assert query.get().foo == 0 + + +@pytest.mark.usefixtures("client_context") +def 
test_get_only(ds_entity): + for i in range(5): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=i) + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + query = SomeKind.query().order(SomeKind.foo) + eventually(query.fetch, length_equals(5)) + assert query.filter(SomeKind.foo == 2).get().foo == 2 + + +@pytest.mark.usefixtures("client_context") +def test_get_none(ds_entity): + for i in range(5): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=i) + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + query = SomeKind.query().order(SomeKind.foo) + eventually(query.fetch, length_equals(5)) + assert query.filter(SomeKind.foo == -1).get() is None + + +@pytest.mark.usefixtures("client_context") +def test_count_all(ds_entity): + for i in range(5): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=i) + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + query = SomeKind.query() + eventually(query.count, equals(5)) + + +@pytest.mark.usefixtures("client_context") +def test_count_with_limit(ds_entity): + for i in range(5): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=i) + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + query = SomeKind.query() + eventually(query.count, equals(5)) + + assert query.count(3) == 3 + + +@pytest.mark.usefixtures("client_context") +def test_count_with_filter(ds_entity): + for i in range(5): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=i) + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + query = SomeKind.query() + eventually(query.count, equals(5)) + + assert query.filter(SomeKind.foo == 2).count() == 1 + + +@pytest.mark.usefixtures("client_context") +def test_count_with_order_by_and_multiquery(ds_entity): + """Regression test for #447 + + https://github.com/googleapis/python-ndb/issues/447 + """ + for i in range(5): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=i) + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + query = SomeKind.query(order_by=[SomeKind.foo]).filter( + ndb.OR(SomeKind.foo < 100, SomeKind.foo > -1) + ) + eventually(query.count, equals(5)) + + +@pytest.mark.usefixtures("client_context") +def test_keys_only_multiquery_with_order(ds_entity): + """Regression test for #509 + + https://github.com/googleapis/python-ndb/issues/509 + """ + keys = [] + for i in range(5): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=i) + keys.append(ndb.Key(KIND, entity_id)) + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + query = ( + SomeKind.query() + .order(SomeKind.foo) + .filter(ndb.OR(SomeKind.foo < 100, SomeKind.foo > -1)) + ) + results = eventually( + functools.partial(query.fetch, keys_only=True), length_equals(5) + ) + assert keys == results + + +@pytest.mark.usefixtures("client_context") +def test_multiquery_with_projection_and_order(ds_entity): + """Regression test for #509 + + https://github.com/googleapis/python-ndb/issues/509 + """ + for i in range(5): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=i, bar="bar " + str(i)) + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StringProperty() + + query = ( + SomeKind.query(projection=[SomeKind.bar]) + .order(SomeKind.foo) + .filter(ndb.OR(SomeKind.foo < 100, SomeKind.foo > -1)) + ) + results = 
eventually(query.fetch, length_equals(5)) + with pytest.raises(ndb.UnprojectedPropertyError): + results[0].foo + + +@pytest.mark.usefixtures("client_context") +def test_multiquery_with_order_by_entity_key(ds_entity): + """Regression test for #629 + + https://github.com/googleapis/python-ndb/issues/629 + """ + + for i in range(5): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=i) + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + query = ( + SomeKind.query() + .order(SomeKind.key) + .filter(ndb.OR(SomeKind.foo == 4, SomeKind.foo == 3, SomeKind.foo == 1)) + ) + + results = eventually(query.fetch, length_equals(3)) + assert [entity.foo for entity in results] == [1, 3, 4] + + +@pytest.mark.usefixtures("client_context") +def test_multiquery_with_order_key_property(ds_entity, client_context): + """Regression test for #629 + + https://github.com/googleapis/python-ndb/issues/629 + """ + project = client_context.client.project + database = client_context.client.database + namespace = client_context.get_namespace() + + for i in range(5): + entity_id = test_utils.system.unique_resource_id() + ds_entity( + KIND, + entity_id, + foo=i, + bar=ds_key_module.Key( + "test_key", + i + 1, + project=project, + database=database, + namespace=namespace, + ), + ) + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.KeyProperty() + + query = ( + SomeKind.query() + .order(SomeKind.bar) + .filter(ndb.OR(SomeKind.foo == 4, SomeKind.foo == 3, SomeKind.foo == 1)) + ) + + results = eventually(query.fetch, length_equals(3)) + assert [entity.foo for entity in results] == [1, 3, 4] + + +@pytest.mark.usefixtures("client_context") +def test_count_with_multi_query(ds_entity): + for i in range(5): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=i) + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + query = SomeKind.query() + eventually(query.count, equals(5)) + + assert query.filter(SomeKind.foo != 2).count() == 4 + + +@pytest.mark.usefixtures("client_context") +def test_fetch_page(dispose_of): + page_size = 5 + n_entities = page_size * 2 + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + @ndb.toplevel + def make_entities(): + entities = [SomeKind(foo=i) for i in range(n_entities)] + keys = yield [entity.put_async() for entity in entities] + raise ndb.Return(keys) + + for key in make_entities(): + dispose_of(key._key) + + query = SomeKind.query().order(SomeKind.foo) + eventually(query.fetch, length_equals(n_entities)) + + results, cursor, more = query.fetch_page(page_size) + assert [entity.foo for entity in results] == [0, 1, 2, 3, 4] + assert more + + safe_cursor = cursor.urlsafe() + next_cursor = ndb.Cursor(urlsafe=safe_cursor) + results, cursor, more = query.fetch_page(page_size, start_cursor=next_cursor) + assert [entity.foo for entity in results] == [5, 6, 7, 8, 9] + + results, cursor, more = query.fetch_page(page_size, start_cursor=cursor) + assert not results + assert not more + + +@pytest.mark.usefixtures("client_context") +def test_fetch_page_in_query(dispose_of): + page_size = 5 + n_entities = page_size * 2 + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + @ndb.toplevel + def make_entities(): + entities = [SomeKind(foo=n_entities) for i in range(n_entities)] + keys = yield [entity.put_async() for entity in entities] + raise ndb.Return(keys) + + for key in make_entities(): + dispose_of(key._key) + + query = SomeKind.query().filter(SomeKind.foo.IN([1, 2, n_entities], 
server_op=True)) + eventually(query.fetch, length_equals(n_entities)) + + results, cursor, more = query.fetch_page(page_size) + assert len(results) == page_size + assert more + + safe_cursor = cursor.urlsafe() + next_cursor = ndb.Cursor(urlsafe=safe_cursor) + results, cursor, more = query.fetch_page(page_size, start_cursor=next_cursor) + assert len(results) == page_size + + results, cursor, more = query.fetch_page(page_size, start_cursor=cursor) + assert not results + assert not more + + +@pytest.mark.usefixtures("client_context") +def test_polymodel_query(ds_entity): + class Animal(ndb.PolyModel): + foo = ndb.IntegerProperty() + + class Cat(Animal): + pass + + animal = Animal(foo=1) + animal.put() + cat = Cat(foo=2) + cat.put() + + query = Animal.query() + results = eventually(query.fetch, length_equals(2)) + + results = sorted(results, key=operator.attrgetter("foo")) + assert isinstance(results[0], Animal) + assert not isinstance(results[0], Cat) + assert isinstance(results[1], Animal) + assert isinstance(results[1], Cat) + + query = Cat.query() + results = eventually(query.fetch, length_equals(1)) + + assert isinstance(results[0], Animal) + assert isinstance(results[0], Cat) + + +@pytest.mark.usefixtures("client_context") +def test_polymodel_query_class_projection(ds_entity): + """Regression test for Issue #248 + + https://github.com/googleapis/python-ndb/issues/248 + """ + + class Animal(ndb.PolyModel): + foo = ndb.IntegerProperty() + + class Cat(Animal): + pass + + animal = Animal(foo=1) + animal.put() + cat = Cat(foo=2) + cat.put() + + query = Animal.query(projection=["class", "foo"]) + results = eventually(query.fetch, length_equals(3)) + + # Mostly reproduces odd behavior of legacy code + results = sorted(results, key=operator.attrgetter("foo")) + + assert isinstance(results[0], Animal) + assert not isinstance(results[0], Cat) + assert results[0].foo == 1 + assert results[0].class_ == ["Animal"] + + assert isinstance(results[1], Animal) + assert not isinstance(results[1], Cat) + assert results[1].foo == 2 + assert results[1].class_ == ["Animal"] + + assert isinstance(results[2], Animal) + assert isinstance(results[2], Cat) # This would be False in legacy + assert results[2].foo == 2 + assert results[2].class_ == ["Cat"] + + +@pytest.mark.usefixtures("client_context") +def test_query_repeated_property(ds_entity): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=1, bar=["a", "b", "c"]) + + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=2, bar=["c", "d", "e"]) + + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=3, bar=["e", "f", "g"]) + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StringProperty(repeated=True) + + eventually(SomeKind.query().fetch, length_equals(3)) + + query = SomeKind.query().filter(SomeKind.bar == "c").order(SomeKind.foo) + results = query.fetch() + + assert len(results) == 2 + assert results[0].foo == 1 + assert results[1].foo == 2 + + +@pytest.mark.usefixtures("client_context") +def test_query_structured_property(dispose_of): + class OtherKind(ndb.Model): + one = ndb.StringProperty() + two = ndb.StringProperty() + three = ndb.StringProperty() + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StructuredProperty(OtherKind) + + @ndb.synctasklet + def make_entities(): + entity1 = SomeKind(foo=1, bar=OtherKind(one="pish", two="posh", three="pash")) + entity2 = SomeKind(foo=2, bar=OtherKind(one="pish", 
two="posh", three="push")) + entity3 = SomeKind( + foo=3, + bar=OtherKind(one="pish", two="moppish", three="pass the peas"), + ) + + keys = yield ( + entity1.put_async(), + entity2.put_async(), + entity3.put_async(), + ) + raise ndb.Return(keys) + + keys = make_entities() + for key in keys: + dispose_of(key._key) + + eventually(SomeKind.query().fetch, length_equals(3)) + + query = ( + SomeKind.query() + .filter(SomeKind.bar.one == "pish", SomeKind.bar.two == "posh") + .order(SomeKind.foo) + ) + + results = query.fetch() + assert len(results) == 2 + assert results[0].foo == 1 + assert results[1].foo == 2 + + +def test_query_structured_property_legacy_data(client_context, dispose_of): + class OtherKind(ndb.Model): + one = ndb.StringProperty() + two = ndb.StringProperty() + three = ndb.StringProperty() + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StructuredProperty(OtherKind) + + @ndb.synctasklet + def make_entities(): + entity1 = SomeKind(foo=1, bar=OtherKind(one="pish", two="posh", three="pash")) + entity2 = SomeKind(foo=2, bar=OtherKind(one="pish", two="posh", three="push")) + entity3 = SomeKind( + foo=3, + bar=OtherKind(one="pish", two="moppish", three="pass the peas"), + ) + + keys = yield ( + entity1.put_async(), + entity2.put_async(), + entity3.put_async(), + ) + raise ndb.Return(keys) + + with client_context.new(legacy_data=True).use(): + keys = make_entities() + for key in keys: + dispose_of(key._key) + + eventually(SomeKind.query().fetch, length_equals(3)) + query = ( + SomeKind.query() + .filter(SomeKind.bar.one == "pish", SomeKind.bar.two == "posh") + .order(SomeKind.foo) + ) + + results = query.fetch() + assert len(results) == 2 + assert results[0].foo == 1 + assert results[1].foo == 2 + + +@pytest.mark.usefixtures("client_context") +def test_query_legacy_structured_property(ds_entity): + class OtherKind(ndb.Model): + one = ndb.StringProperty() + two = ndb.StringProperty() + three = ndb.StringProperty() + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StructuredProperty(OtherKind) + + entity_id = test_utils.system.unique_resource_id() + ds_entity( + KIND, + entity_id, + **{"foo": 1, "bar.one": "pish", "bar.two": "posh", "bar.three": "pash"} + ) + + entity_id = test_utils.system.unique_resource_id() + ds_entity( + KIND, + entity_id, + **{"foo": 2, "bar.one": "pish", "bar.two": "posh", "bar.three": "push"} + ) + + entity_id = test_utils.system.unique_resource_id() + ds_entity( + KIND, + entity_id, + **{ + "foo": 3, + "bar.one": "pish", + "bar.two": "moppish", + "bar.three": "pass the peas", + } + ) + + eventually(SomeKind.query().fetch, length_equals(3)) + + query = ( + SomeKind.query() + .filter(SomeKind.bar.one == "pish", SomeKind.bar.two == "posh") + .order(SomeKind.foo) + ) + + results = query.fetch() + assert len(results) == 2 + assert results[0].foo == 1 + assert results[1].foo == 2 + + +@pytest.mark.usefixtures("client_context") +def test_query_structured_property_with_projection(dispose_of): + class OtherKind(ndb.Model): + one = ndb.StringProperty() + two = ndb.StringProperty() + three = ndb.StringProperty() + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StructuredProperty(OtherKind) + + @ndb.synctasklet + def make_entities(): + entity1 = SomeKind(foo=1, bar=OtherKind(one="pish", two="posh", three="pash")) + entity2 = SomeKind(foo=2, bar=OtherKind(one="bish", two="bosh", three="bush")) + entity3 = SomeKind( + foo=3, + bar=OtherKind(one="pish", two="moppish", three="pass the peas"), + ) + + keys = 
yield ( + entity1.put_async(), + entity2.put_async(), + entity3.put_async(), + ) + raise ndb.Return(keys) + + keys = make_entities() + for key in keys: + dispose_of(key._key) + + eventually(SomeKind.query().fetch, length_equals(3)) + query = ( + SomeKind.query(projection=("foo", "bar.one", "bar.two")) + .filter(SomeKind.foo < 3) + .order(SomeKind.foo) + ) + + results = query.fetch() + assert len(results) == 2 + assert results[0].foo == 1 + assert results[0].bar.one == "pish" + assert results[0].bar.two == "posh" + assert results[1].foo == 2 + assert results[1].bar.one == "bish" + assert results[1].bar.two == "bosh" + + with pytest.raises(ndb.UnprojectedPropertyError): + results[0].bar.three + + with pytest.raises(ndb.UnprojectedPropertyError): + results[1].bar.three + + +@pytest.mark.usefixtures("client_context") +def test_query_structured_property_rename_subproperty(dispose_of): + """Regression test for #449 + + https://github.com/googleapis/python-ndb/issues/449 + """ + + class OtherKind(ndb.Model): + one = ndb.StringProperty("a_different_name") + + class SomeKind(ndb.Model): + bar = ndb.StructuredProperty(OtherKind) + + key = SomeKind(bar=OtherKind(one="pish")).put() + dispose_of(key._key) + + eventually(SomeKind.query().fetch, length_equals(1)) + + query = SomeKind.query().filter(SomeKind.bar.one == "pish") + results = query.fetch() + assert len(results) == 1 + assert results[0].bar.one == "pish" + + +@pytest.mark.usefixtures("client_context") +def test_query_repeated_structured_property_with_properties(dispose_of): + class OtherKind(ndb.Model): + one = ndb.StringProperty() + two = ndb.StringProperty() + three = ndb.StringProperty() + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StructuredProperty(OtherKind, repeated=True) + + @ndb.synctasklet + def make_entities(): + entity1 = SomeKind( + foo=1, + bar=[ + OtherKind(one="pish", two="posh", three="pash"), + OtherKind(one="bish", two="bosh", three="bash"), + ], + ) + entity2 = SomeKind( + foo=2, + bar=[ + OtherKind(one="pish", two="bosh", three="bass"), + OtherKind(one="bish", two="posh", three="pass"), + ], + ) + entity3 = SomeKind( + foo=3, + bar=[ + OtherKind(one="fish", two="fosh", three="fash"), + OtherKind(one="bish", two="bosh", three="bash"), + ], + ) + + keys = yield ( + entity1.put_async(), + entity2.put_async(), + entity3.put_async(), + ) + raise ndb.Return(keys) + + keys = make_entities() + for key in keys: + dispose_of(key._key) + + eventually(SomeKind.query().fetch, length_equals(3)) + query = ( + SomeKind.query() + .filter(SomeKind.bar.one == "pish", SomeKind.bar.two == "posh") + .order(SomeKind.foo) + ) + + results = query.fetch() + assert len(results) == 2 + assert results[0].foo == 1 + assert results[1].foo == 2 + + +def test_query_repeated_structured_property_with_properties_legacy_data( + client_context, dispose_of +): + class OtherKind(ndb.Model): + one = ndb.StringProperty() + two = ndb.StringProperty() + three = ndb.StringProperty() + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StructuredProperty(OtherKind, repeated=True) + + @ndb.synctasklet + def make_entities(): + entity1 = SomeKind( + foo=1, + bar=[ + OtherKind(one="pish", two="posh", three="pash"), + OtherKind(one="bish", two="bosh", three="bash"), + ], + ) + entity2 = SomeKind( + foo=2, + bar=[ + OtherKind(one="pish", two="bosh", three="bass"), + OtherKind(one="bish", two="posh", three="pass"), + ], + ) + entity3 = SomeKind( + foo=3, + bar=[ + OtherKind(one="fish", two="fosh", three="fash"), + 
OtherKind(one="bish", two="bosh", three="bash"), + ], + ) + + keys = yield ( + entity1.put_async(), + entity2.put_async(), + entity3.put_async(), + ) + raise ndb.Return(keys) + + with client_context.new(legacy_data=True).use(): + keys = make_entities() + for key in keys: + dispose_of(key._key) + + eventually(SomeKind.query().fetch, length_equals(3)) + query = ( + SomeKind.query() + .filter(SomeKind.bar.one == "pish", SomeKind.bar.two == "posh") + .order(SomeKind.foo) + ) + + results = query.fetch() + assert len(results) == 2 + assert results[0].foo == 1 + assert results[1].foo == 2 + + +@pytest.mark.usefixtures("client_context") +def test_query_repeated_structured_property_with_entity_twice(dispose_of): + class OtherKind(ndb.Model): + one = ndb.StringProperty() + two = ndb.StringProperty() + three = ndb.StringProperty() + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StructuredProperty(OtherKind, repeated=True) + + @ndb.synctasklet + def make_entities(): + entity1 = SomeKind( + foo=1, + bar=[ + OtherKind(one="pish", two="posh", three="pash"), + OtherKind(one="bish", two="bosh", three="bash"), + ], + ) + entity2 = SomeKind( + foo=2, + bar=[ + OtherKind(one="bish", two="bosh", three="bass"), + OtherKind(one="pish", two="posh", three="pass"), + ], + ) + entity3 = SomeKind( + foo=3, + bar=[ + OtherKind(one="pish", two="fosh", three="fash"), + OtherKind(one="bish", two="posh", three="bash"), + ], + ) + + keys = yield ( + entity1.put_async(), + entity2.put_async(), + entity3.put_async(), + ) + raise ndb.Return(keys) + + keys = make_entities() + for key in keys: + dispose_of(key._key) + + eventually(SomeKind.query().fetch, length_equals(3)) + query = ( + SomeKind.query() + .filter( + SomeKind.bar == OtherKind(one="pish", two="posh"), + SomeKind.bar == OtherKind(two="posh", three="pash"), + ) + .order(SomeKind.foo) + ) + + results = query.fetch() + assert len(results) == 1 + assert results[0].foo == 1 + + +def test_query_repeated_structured_property_with_entity_twice_legacy_data( + client_context, dispose_of +): + class OtherKind(ndb.Model): + one = ndb.StringProperty() + two = ndb.StringProperty() + three = ndb.StringProperty() + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StructuredProperty(OtherKind, repeated=True) + + @ndb.synctasklet + def make_entities(): + entity1 = SomeKind( + foo=1, + bar=[ + OtherKind(one="pish", two="posh", three="pash"), + OtherKind(one="bish", two="bosh", three="bash"), + ], + ) + entity2 = SomeKind( + foo=2, + bar=[ + OtherKind(one="bish", two="bosh", three="bass"), + OtherKind(one="pish", two="posh", three="pass"), + ], + ) + entity3 = SomeKind( + foo=3, + bar=[ + OtherKind(one="pish", two="fosh", three="fash"), + OtherKind(one="bish", two="posh", three="bash"), + ], + ) + + keys = yield ( + entity1.put_async(), + entity2.put_async(), + entity3.put_async(), + ) + raise ndb.Return(keys) + + with client_context.new(legacy_data=True).use(): + keys = make_entities() + for key in keys: + dispose_of(key._key) + + eventually(SomeKind.query().fetch, length_equals(3)) + query = ( + SomeKind.query() + .filter( + SomeKind.bar == OtherKind(one="pish", two="posh"), + SomeKind.bar == OtherKind(two="posh", three="pash"), + ) + .order(SomeKind.foo) + ) + + results = query.fetch() + assert len(results) == 1 + assert results[0].foo == 1 + + +@pytest.mark.usefixtures("client_context") +def test_query_repeated_structured_property_with_projection(dispose_of): + class OtherKind(ndb.Model): + one = ndb.StringProperty() + two = 
ndb.StringProperty() + three = ndb.StringProperty() + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StructuredProperty(OtherKind, repeated=True) + + @ndb.synctasklet + def make_entities(): + entity1 = SomeKind( + foo=1, + bar=[ + OtherKind(one="angle", two="cankle", three="pash"), + OtherKind(one="bangle", two="dangle", three="bash"), + ], + ) + entity2 = SomeKind( + foo=2, + bar=[ + OtherKind(one="bish", two="bosh", three="bass"), + OtherKind(one="pish", two="posh", three="pass"), + ], + ) + entity3 = SomeKind( + foo=3, + bar=[ + OtherKind(one="pish", two="fosh", three="fash"), + OtherKind(one="bish", two="posh", three="bash"), + ], + ) + + keys = yield ( + entity1.put_async(), + entity2.put_async(), + entity3.put_async(), + ) + raise ndb.Return(keys) + + keys = make_entities() + for key in keys: + dispose_of(key._key) + + eventually(SomeKind.query().fetch, length_equals(3)) + query = SomeKind.query(projection=("bar.one", "bar.two")).filter(SomeKind.foo < 2) + + # This counter-intuitive result is consistent with Legacy NDB behavior and + # is a result of the odd way Datastore handles projection queries with + # array valued properties: + # + # https://cloud.google.com/datastore/docs/concepts/queries#projections_and_array-valued_properties + # + results = query.fetch() + assert len(results) == 4 + + def sort_key(result): + return (result.bar[0].one, result.bar[0].two) + + results = sorted(results, key=sort_key) + + assert results[0].bar[0].one == "angle" + assert results[0].bar[0].two == "cankle" + with pytest.raises(ndb.UnprojectedPropertyError): + results[0].bar[0].three + + assert results[1].bar[0].one == "angle" + assert results[1].bar[0].two == "dangle" + with pytest.raises(ndb.UnprojectedPropertyError): + results[1].bar[0].three + + assert results[2].bar[0].one == "bangle" + assert results[2].bar[0].two == "cankle" + with pytest.raises(ndb.UnprojectedPropertyError): + results[2].bar[0].three + + assert results[3].bar[0].one == "bangle" + assert results[3].bar[0].two == "dangle" + with pytest.raises(ndb.UnprojectedPropertyError): + results[3].bar[0].three + + +def test_query_repeated_structured_property_with_projection_legacy_data( + client_context, dispose_of +): + class OtherKind(ndb.Model): + one = ndb.StringProperty() + two = ndb.StringProperty() + three = ndb.StringProperty() + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StructuredProperty(OtherKind, repeated=True) + + @ndb.synctasklet + def make_entities(): + entity1 = SomeKind( + foo=1, + bar=[ + OtherKind(one="angle", two="cankle", three="pash"), + OtherKind(one="bangle", two="dangle", three="bash"), + ], + ) + entity2 = SomeKind( + foo=2, + bar=[ + OtherKind(one="bish", two="bosh", three="bass"), + OtherKind(one="pish", two="posh", three="pass"), + ], + ) + entity3 = SomeKind( + foo=3, + bar=[ + OtherKind(one="pish", two="fosh", three="fash"), + OtherKind(one="bish", two="posh", three="bash"), + ], + ) + + keys = yield ( + entity1.put_async(), + entity2.put_async(), + entity3.put_async(), + ) + raise ndb.Return(keys) + + with client_context.new(legacy_data=True).use(): + keys = make_entities() + for key in keys: + dispose_of(key._key) + + eventually(SomeKind.query().fetch, length_equals(3)) + query = SomeKind.query(projection=("bar.one", "bar.two")).filter( + SomeKind.foo < 2 + ) + + # This counter-intuitive result is consistent with Legacy NDB behavior + # and is a result of the odd way Datastore handles projection queries + # with array valued properties: + # + # 
https://cloud.google.com/datastore/docs/concepts/queries#projections_and_array-valued_properties + # + results = query.fetch() + assert len(results) == 4 + + def sort_key(result): + return (result.bar[0].one, result.bar[0].two) + + results = sorted(results, key=sort_key) + + assert results[0].bar[0].one == "angle" + assert results[0].bar[0].two == "cankle" + with pytest.raises(ndb.UnprojectedPropertyError): + results[0].bar[0].three + + assert results[1].bar[0].one == "angle" + assert results[1].bar[0].two == "dangle" + with pytest.raises(ndb.UnprojectedPropertyError): + results[1].bar[0].three + + assert results[2].bar[0].one == "bangle" + assert results[2].bar[0].two == "cankle" + with pytest.raises(ndb.UnprojectedPropertyError): + results[2].bar[0].three + + assert results[3].bar[0].one == "bangle" + assert results[3].bar[0].two == "dangle" + with pytest.raises(ndb.UnprojectedPropertyError): + results[3].bar[0].three + + +@pytest.mark.usefixtures("client_context") +def test_query_legacy_repeated_structured_property(ds_entity): + class OtherKind(ndb.Model): + one = ndb.StringProperty() + two = ndb.StringProperty() + three = ndb.StringProperty() + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StructuredProperty(OtherKind, repeated=True) + + entity_id = test_utils.system.unique_resource_id() + ds_entity( + KIND, + entity_id, + **{ + "foo": 1, + "bar.one": ["pish", "bish"], + "bar.two": ["posh", "bosh"], + "bar.three": ["pash", "bash"], + } + ) + + entity_id = test_utils.system.unique_resource_id() + ds_entity( + KIND, + entity_id, + **{ + "foo": 2, + "bar.one": ["bish", "pish"], + "bar.two": ["bosh", "posh"], + "bar.three": ["bass", "pass"], + } + ) + + entity_id = test_utils.system.unique_resource_id() + ds_entity( + KIND, + entity_id, + **{ + "foo": 3, + "bar.one": ["pish", "bish"], + "bar.two": ["fosh", "posh"], + "bar.three": ["fash", "bash"], + } + ) + + eventually(SomeKind.query().fetch, length_equals(3)) + + query = ( + SomeKind.query() + .filter( + SomeKind.bar == OtherKind(one="pish", two="posh"), + SomeKind.bar == OtherKind(two="posh", three="pash"), + ) + .order(SomeKind.foo) + ) + + results = query.fetch() + assert len(results) == 1 + assert results[0].foo == 1 + + +@pytest.mark.usefixtures("client_context") +def test_query_legacy_repeated_structured_property_with_name(ds_entity): + class OtherKind(ndb.Model): + one = ndb.StringProperty() + two = ndb.StringProperty() + three = ndb.StringProperty() + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StructuredProperty(OtherKind, "b", repeated=True) + + entity_id = test_utils.system.unique_resource_id() + ds_entity( + KIND, + entity_id, + **{ + "foo": 1, + "b.one": ["pish", "bish"], + "b.two": ["posh", "bosh"], + "b.three": ["pash", "bash"], + } + ) + + eventually(SomeKind.query().fetch, length_equals(1)) + + query = SomeKind.query() + + results = query.fetch() + assert len(results) == 1 + assert results[0].bar[0].one == "pish" + + +@pytest.mark.usefixtures("client_context") +def test_fetch_page_with_repeated_structured_property(dispose_of): + """Regression test for Issue #254. 
+ + https://github.com/googleapis/python-ndb/issues/254 + """ + + class OtherKind(ndb.Model): + one = ndb.StringProperty() + two = ndb.StringProperty() + three = ndb.IntegerProperty() + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + bar = ndb.StructuredProperty(OtherKind, repeated=True) + + N = 30 + + @ndb.synctasklet + def make_entities(): + futures = [ + SomeKind( + foo=i, + bar=[ + OtherKind(one="pish", two="posh", three=i % 2), + OtherKind(one="bish", two="bosh", three=i % 2), + ], + ).put_async() + for i in range(N) + ] + + keys = yield futures + raise ndb.Return(keys) + + keys = make_entities() + for key in keys: + dispose_of(key._key) + + eventually(SomeKind.query().fetch, length_equals(N)) + query = ( + SomeKind.query() + .filter( + SomeKind.bar == OtherKind(one="pish", two="posh"), + SomeKind.bar == OtherKind(two="bosh", three=0), + ) + .order(SomeKind.foo) + ) + + results, cursor, more = query.fetch_page(page_size=5) + assert [entity.foo for entity in results] == [0, 2, 4, 6, 8] + + results, cursor, more = query.fetch_page(page_size=5, start_cursor=cursor) + assert [entity.foo for entity in results] == [10, 12, 14, 16, 18] + + +@pytest.mark.usefixtures("client_context") +def test_map(dispose_of): + class SomeKind(ndb.Model): + foo = ndb.StringProperty() + ref = ndb.KeyProperty() + + class OtherKind(ndb.Model): + foo = ndb.StringProperty() + + foos = ("aa", "bb", "cc", "dd", "ee") + others = [OtherKind(foo=foo) for foo in foos] + other_keys = ndb.put_multi(others) + for key in other_keys: + dispose_of(key._key) + + things = [SomeKind(foo=foo, ref=key) for foo, key in zip(foos, other_keys)] + keys = ndb.put_multi(things) + for key in keys: + dispose_of(key._key) + + eventually(SomeKind.query().fetch, length_equals(5)) + eventually(OtherKind.query().fetch, length_equals(5)) + + @ndb.tasklet + def get_other_foo(thing): + other = yield thing.ref.get_async() + raise ndb.Return(other.foo) + + query = SomeKind.query().order(SomeKind.foo) + assert query.map(get_other_foo) == foos + + +@pytest.mark.usefixtures("client_context") +def test_map_empty_result_set(dispose_of): + class SomeKind(ndb.Model): + foo = ndb.StringProperty() + + def somefunc(x): + raise Exception("Shouldn't be called.") + + query = SomeKind.query() + assert query.map(somefunc) == () + + +@pytest.mark.usefixtures("client_context") +def test_gql(ds_entity): + for i in range(5): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=i) + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + eventually(SomeKind.query().fetch, length_equals(5)) + + query = ndb.gql("SELECT * FROM SomeKind WHERE foo = :1", 2) + results = query.fetch() + assert results[0].foo == 2 + + query = SomeKind.gql("WHERE foo = :1", 2) + results = query.fetch() + assert results[0].foo == 2 + + +@pytest.mark.filterwarnings("ignore") +@pytest.mark.usefixtures("client_context") +def test_IN(ds_entity): + for i in range(5): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=i) + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + eventually(SomeKind.query().fetch, length_equals(5)) + + query = SomeKind.gql("where foo in (2, 3)").order(SomeKind.foo) + results = query.fetch() + assert len(results) == 2 + assert results[0].foo == 2 + assert results[1].foo == 3 + + query = SomeKind.gql("where foo in :1", [2, 3]).order(SomeKind.foo) + results = query.fetch() + assert len(results) == 2 + assert results[0].foo == 2 + assert results[1].foo == 3 + + 
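+# A note on IN semantics for the tests below: legacy-style IN (as in the GQL
+# queries above) is expanded client-side into a disjunction of equality
+# filters, which is presumably why these tests suppress the resulting
+# warnings. Passing server_op=True, as test_IN_timestamp does, instead asks
+# Datastore to evaluate its native IN operator in a single server-side query.
+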
+@pytest.mark.filterwarnings("ignore") +@pytest.mark.usefixtures("client_context") +def test_IN_timestamp(ds_entity): + for i in range(5): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=datetime.datetime.fromtimestamp(i)) + + class SomeKind(ndb.Model): + foo = ndb.DateTimeProperty() + + eventually(SomeKind.query().fetch, length_equals(5)) + + t2 = datetime.datetime.fromtimestamp(2) + t3 = datetime.datetime.fromtimestamp(3) + + query = SomeKind.query(SomeKind.foo.IN((t2, t3), server_op=True)) + results = query.fetch() + assert len(results) == 2 + assert results[0].foo == t2 + assert results[1].foo == t3 + + +@pytest.mark.filterwarnings("ignore") +@pytest.mark.usefixtures("client_context") +def test_NOT_IN(ds_entity): + for i in range(5): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=i, pt=ndb.GeoPt(i, i)) + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + pt = ndb.GeoPtProperty() + + eventually(SomeKind.query().fetch, length_equals(5)) + + query = SomeKind.query(SomeKind.pt.NOT_IN([ndb.GeoPt(1, 1)])) + results = query.fetch() + assert len(results) == 4 + assert results[0].foo == 0 + assert results[1].foo == 2 + + query = SomeKind.gql("where foo not in :1", [2, 3]) + results = query.fetch() + assert len(results) == 3 + assert results[0].foo == 0 + assert results[1].foo == 1 + assert results[2].foo == 4 + + +@pytest.mark.usefixtures("client_context") +def test_projection_with_json_property(dispose_of): + """Regression test for #378 + + https://github.com/googleapis/python-ndb/issues/378 + """ + + class SomeKind(ndb.Model): + foo = ndb.JsonProperty(indexed=True) + + key = SomeKind(foo={"hi": "mom!"}).put() + dispose_of(key._key) + + eventually(SomeKind.query().fetch, length_equals(1)) + + results = SomeKind.query().fetch(projection=[SomeKind.foo]) + assert results[0].foo == {"hi": "mom!"} + + +@pytest.mark.usefixtures("client_context") +def test_DateTime(ds_entity): + for i in range(5): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=datetime.datetime(2020, i + 1, 1, 12, 0, 0)) + + class SomeKind(ndb.Model): + foo = ndb.DateTimeProperty() + + eventually(SomeKind.query().fetch, length_equals(5)) + + query = SomeKind.gql("where foo > DateTime(2020, 4, 1, 11, 0, 0)").order( + SomeKind.foo + ) + results = query.fetch() + assert len(results) == 2 + assert results[0].foo == datetime.datetime(2020, 4, 1, 12, 0, 0) + assert results[1].foo == datetime.datetime(2020, 5, 1, 12, 0, 0) + + +@pytest.mark.usefixtures("client_context") +def test_Date(ds_entity): + for i in range(5): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=datetime.datetime(2020, i + 1, 1)) + + class SomeKind(ndb.Model): + foo = ndb.DateProperty() + + eventually(SomeKind.query().fetch, length_equals(5)) + + query = SomeKind.gql("where foo > Date(2020, 3, 1)").order(SomeKind.foo) + results = query.fetch() + assert len(results) == 2 + assert results[0].foo == datetime.date(2020, 4, 1) + assert results[1].foo == datetime.date(2020, 5, 1) + + +@pytest.mark.usefixtures("client_context") +def test_Time(ds_entity): + for i in range(5): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=datetime.datetime(1970, 1, 1, i + 1, 0, 0)) + + class SomeKind(ndb.Model): + foo = ndb.TimeProperty() + + eventually(SomeKind.query().fetch, length_equals(5)) + + query = SomeKind.gql("where foo > Time(3, 0, 0)").order(SomeKind.foo) + results = query.fetch() + 
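+    # TimeProperty values round-trip through full datetimes pinned to the
+    # 1970-01-01 epoch date (hence the datetime fixtures above), while query
+    # results surface as plain datetime.time values.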
assert len(results) == 2 + assert results[0].foo == datetime.time(4, 0, 0) + assert results[1].foo == datetime.time(5, 0, 0) + + +@pytest.mark.usefixtures("client_context") +def test_GeoPt(ds_entity): + for i in range(5): + entity_id = test_utils.system.unique_resource_id() + ds_entity(KIND, entity_id, foo=ndb.model.GeoPt(20, i * 20)) + + class SomeKind(ndb.Model): + foo = ndb.GeoPtProperty() + + eventually(SomeKind.query().fetch, length_equals(5)) + + query = SomeKind.gql("where foo > GeoPt(20, 40)").order(SomeKind.foo) + results = query.fetch() + assert len(results) == 2 + assert results[0].foo == ndb.model.GeoPt(20, 60) + assert results[1].foo == ndb.model.GeoPt(20, 80) + + +@pytest.mark.usefixtures("client_context") +def test_Key(ds_entity, client_context): + project = client_context.client.project + database = client_context.client.database + namespace = client_context.get_namespace() + for i in range(5): + entity_id = test_utils.system.unique_resource_id() + ds_entity( + KIND, + entity_id, + foo=ds_key_module.Key( + "test_key", + i + 1, + project=project, + database=database, + namespace=namespace, + ), + ) + + class SomeKind(ndb.Model): + foo = ndb.KeyProperty() + + eventually(SomeKind.query().fetch, length_equals(5)) + + query = SomeKind.gql("where foo = Key('test_key', 3)") + results = query.fetch() + assert len(results) == 1 + assert results[0].foo == ndb.key.Key( + "test_key", 3, project=project, namespace=namespace + ) + + +@pytest.mark.usefixtures("client_context") +def test_high_offset(dispose_of): + """Regression test for Issue #392 + + https://github.com/googleapis/python-ndb/issues/392 + """ + n_entities = 1100 + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + entities = [SomeKind(id=i + 1, foo=i) for i in range(n_entities)] + keys = ndb.put_multi(entities) + for key in keys: + dispose_of(key._key) + + eventually(SomeKind.query().fetch, length_equals(n_entities)) + query = SomeKind.query(order_by=[SomeKind.foo]) + index = n_entities - 5 + result = query.fetch(offset=index, limit=1)[0] + assert result.foo == index + + +def test_uncommitted_deletes(dispose_of, client_context): + """Regression test for Issue #586 + + https://github.com/googleapis/python-ndb/issues/586 + """ + + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + parent = SomeKind(foo=41) + parent_key = parent.put() + entity = SomeKind(foo=42, parent=parent_key) + key = entity.put() + dispose_of(key._key) + eventually(SomeKind.query().fetch, length_equals(2)) + + @ndb.transactional() + def do_the_thing(key): + key.delete() # Will be cached but not committed when query runs + return SomeKind.query(SomeKind.foo == 42, ancestor=parent_key).fetch() + + with client_context.new(cache_policy=None).use(): # Use default cache policy + assert len(do_the_thing(key)) == 0 + + +def test_query_updates_cache(dispose_of, client_context): + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + entity = SomeKind(foo=42) + key = entity.put() + dispose_of(key._key) + eventually(SomeKind.query().fetch, length_equals(1)) + + with client_context.new(cache_policy=None).use(): # Use default cache policy + retrieved = SomeKind.query().get() + assert retrieved.foo == 42 + + # If there is a cache hit, we'll get back the same object, not just a copy + assert key.get() is retrieved + + +def test_query_with_explicit_use_cache_updates_cache(dispose_of, client_context): + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + entity = SomeKind(foo=42) + key = entity.put(use_cache=False) + 
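+    # The put deliberately bypasses the context cache, so the only thing that
+    # can populate the cache is the fetch(use_cache=True) that follows.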
dispose_of(key._key) + assert len(client_context.cache) == 0 + + eventually(lambda: SomeKind.query().fetch(use_cache=True), length_equals(1)) + assert len(client_context.cache) == 1 + + +def test_query_with_use_cache_false_does_not_update_cache(dispose_of, client_context): + class SomeKind(ndb.Model): + foo = ndb.IntegerProperty() + + entity = SomeKind(foo=42) + key = entity.put(use_cache=False) + dispose_of(key._key) + assert len(client_context.cache) == 0 + + eventually(lambda: SomeKind.query().fetch(use_cache=False), length_equals(1)) + assert len(client_context.cache) == 0 diff --git a/packages/google-cloud-ndb/tests/unit/__init__.py b/packages/google-cloud-ndb/tests/unit/__init__.py new file mode 100644 index 000000000000..b0c7da3d7725 --- /dev/null +++ b/packages/google-cloud-ndb/tests/unit/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/packages/google-cloud-ndb/tests/unit/models.py b/packages/google-cloud-ndb/tests/unit/models.py new file mode 100644 index 000000000000..e5156ec163a2 --- /dev/null +++ b/packages/google-cloud-ndb/tests/unit/models.py @@ -0,0 +1,28 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +This file holds ndb models for validating aspects of data loading. +""" + +from google.cloud import ndb + + +class A(ndb.Model): + some_prop = ndb.IntegerProperty() + source = ndb.StringProperty() + + +class B(ndb.Model): + sub_model = ndb.PickleProperty() diff --git a/packages/google-cloud-ndb/tests/unit/test__batch.py b/packages/google-cloud-ndb/tests/unit/test__batch.py new file mode 100644 index 000000000000..8f370706f8ec --- /dev/null +++ b/packages/google-cloud-ndb/tests/unit/test__batch.py @@ -0,0 +1,57 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
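+
+# These tests exercise _batch.get_batch, which keeps one in-flight batch per
+# (batch class, options) pair on the current context, starts a fresh batch
+# once the old one reports full(), and flushes batches from the event loop's
+# idle callbacks.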
+ +import pytest + +from google.cloud.ndb import _batch +from google.cloud.ndb import _eventloop + + +@pytest.mark.usefixtures("in_context") +class Test_get_batch: + def test_it(self): + options = {"foo": "bar"} + batch = _batch.get_batch(MockBatch, options) + assert batch.options is options + assert not batch.idle_called + + different_options = {"food": "barn"} + assert _batch.get_batch(MockBatch, different_options) is not batch + + assert _batch.get_batch(MockBatch) is not batch + + assert _batch.get_batch(MockBatch, options) is batch + + batch._full = True + batch2 = _batch.get_batch(MockBatch, options) + assert batch2 is not batch + assert not batch2.idle_called + + _eventloop.run() + assert batch.idle_called + assert batch2.idle_called + + +class MockBatch: + _full = False + + def __init__(self, options): + self.options = options + self.idle_called = False + + def idle_callback(self): + self.idle_called = True + + def full(self): + return self._full diff --git a/packages/google-cloud-ndb/tests/unit/test__cache.py b/packages/google-cloud-ndb/tests/unit/test__cache.py new file mode 100644 index 000000000000..c0b3e426ebf1 --- /dev/null +++ b/packages/google-cloud-ndb/tests/unit/test__cache.py @@ -0,0 +1,1149 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
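+
+# Unit tests for the global-cache helpers: context-cache validation, the
+# per-operation batch classes, and the read/write lock protocol layered on
+# set_if_not_exists, watch, and compare_and_swap.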
+ +import warnings + +from unittest import mock + +import pytest + +from google.cloud.ndb import _cache +from google.cloud.ndb import tasklets + + +def future_result(result): + future = tasklets.Future() + future.set_result(result) + return future + + +class TestContextCache: + @staticmethod + def test_get_and_validate_valid(): + cache = _cache.ContextCache() + test_entity = mock.Mock(_key="test") + cache["test"] = test_entity + assert cache.get_and_validate("test") is test_entity + + @staticmethod + def test_get_and_validate_invalid(): + cache = _cache.ContextCache() + test_entity = mock.Mock(_key="test") + cache["test"] = test_entity + test_entity._key = "changed_key" + with pytest.raises(KeyError): + cache.get_and_validate("test") + + @staticmethod + def test_get_and_validate_none(): + cache = _cache.ContextCache() + cache["test"] = None + assert cache.get_and_validate("test") is None + + @staticmethod + def test_get_and_validate_miss(): + cache = _cache.ContextCache() + with pytest.raises(KeyError): + cache.get_and_validate("nonexistent_key") + + @staticmethod + def test___repr__(): + cache = _cache.ContextCache() + cache["hello dad"] = "i'm in jail" + assert repr(cache) == "ContextCache()" + + +class Test_GlobalCacheBatch: + @staticmethod + def test_make_call(): + batch = _cache._GlobalCacheBatch() + with pytest.raises(NotImplementedError): + batch.make_call() + + @staticmethod + def test_future_info(): + batch = _cache._GlobalCacheBatch() + with pytest.raises(NotImplementedError): + batch.future_info(None) + + @staticmethod + def test_idle_callback_exception(): + class TransientError(Exception): + pass + + error = TransientError("oops") + batch = _cache._GlobalCacheBatch() + batch.make_call = mock.Mock(side_effect=error) + future1, future2 = tasklets.Future(), tasklets.Future() + batch.futures = [future1, future2] + batch.idle_callback() + assert future1.exception() is error + assert future2.exception() is error + + +@pytest.mark.usefixtures("in_context") +@mock.patch("google.cloud.ndb._cache._global_cache") +@mock.patch("google.cloud.ndb._cache._batch") +def test_global_get(_batch, _global_cache): + batch = _batch.get_batch.return_value + future = _future_result("hi mom!") + batch.add.return_value = future + _global_cache.return_value = mock.Mock( + transient_errors=(), + strict_read=False, + spec=("transient_errors", "strict_read"), + ) + + assert _cache.global_get(b"foo").result() == "hi mom!" 
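+    # The value is returned through a shared _GlobalCacheGetBatch, so many
+    # concurrent lookups can ride a single cache round trip; the assertions
+    # below check the batch dispatch rather than the cache itself.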
+ _batch.get_batch.assert_called_once_with(_cache._GlobalCacheGetBatch) + batch.add.assert_called_once_with(b"foo") + + +@pytest.mark.usefixtures("in_context") +@mock.patch("google.cloud.ndb.tasklets.sleep") +@mock.patch("google.cloud.ndb._cache._global_cache") +@mock.patch("google.cloud.ndb._cache._batch") +def test_global_get_with_error_strict(_batch, _global_cache, sleep): + class TransientError(Exception): + pass + + sleep.return_value = future_result(None) + batch = _batch.get_batch.return_value + future = _future_exception(TransientError("oops")) + batch.add.return_value = future + _global_cache.return_value = mock.Mock( + transient_errors=(TransientError,), + strict_read=True, + spec=("transient_errors", "strict_read"), + ) + + with pytest.raises(TransientError): + _cache.global_get(b"foo").result() + + _batch.get_batch.assert_called_with(_cache._GlobalCacheGetBatch) + batch.add.assert_called_with(b"foo") + + +@pytest.mark.usefixtures("in_context") +@mock.patch("google.cloud.ndb.tasklets.sleep") +@mock.patch("google.cloud.ndb._cache._global_cache") +@mock.patch("google.cloud.ndb._cache._batch") +def test_global_get_with_error_strict_retry(_batch, _global_cache, sleep): + class TransientError(Exception): + pass + + sleep.return_value = future_result(None) + batch = _batch.get_batch.return_value + batch.add.side_effect = [ + _future_exception(TransientError("oops")), + future_result("hi mom!"), + ] + _global_cache.return_value = mock.Mock( + transient_errors=(TransientError,), + strict_read=True, + spec=("transient_errors", "strict_read"), + ) + + assert _cache.global_get(b"foo").result() == "hi mom!" + _batch.get_batch.assert_called_with(_cache._GlobalCacheGetBatch) + batch.add.assert_called_with(b"foo") + + +@pytest.mark.usefixtures("in_context") +@mock.patch("google.cloud.ndb._cache._global_cache") +@mock.patch("google.cloud.ndb._cache._batch") +def test_global_get_with_error_not_strict(_batch, _global_cache): + class TransientError(Exception): + pass + + batch = _batch.get_batch.return_value + future = _future_exception(TransientError("oops")) + batch.add.return_value = future + _global_cache.return_value = mock.Mock( + transient_errors=(TransientError,), + strict_read=False, + spec=("transient_errors", "strict_read"), + ) + + with warnings.catch_warnings(record=True) as logged: + assert _cache.global_get(b"foo").result() is None + assert len(logged) in [1, 2] + + _batch.get_batch.assert_called_once_with(_cache._GlobalCacheGetBatch) + batch.add.assert_called_once_with(b"foo") + + +class Test_GlobalCacheGetBatch: + @staticmethod + def test_add_and_idle_and_done_callbacks(in_context): + cache = mock.Mock() + cache.get.return_value = future_result([b"one", b"two"]) + + batch = _cache._GlobalCacheGetBatch(None) + future1 = batch.add(b"foo") + future2 = batch.add(b"bar") + future3 = batch.add(b"foo") + + assert set(batch.todo.keys()) == {b"foo", b"bar"} + assert batch.keys == [b"foo", b"bar"] + + with in_context.new(global_cache=cache).use(): + batch.idle_callback() + + cache.get.assert_called_once_with(batch.keys) + assert future1.result() == b"one" + assert future2.result() == b"two" + assert future3.result() == b"one" + + @staticmethod + def test_add_and_idle_and_done_callbacks_synchronous(in_context): + cache = mock.Mock() + cache.get.return_value = [b"one", b"two"] + + batch = _cache._GlobalCacheGetBatch(None) + future1 = batch.add(b"foo") + future2 = batch.add(b"bar") + + assert set(batch.todo.keys()) == {b"foo", b"bar"} + assert batch.keys == [b"foo", b"bar"] + + with 
in_context.new(global_cache=cache).use(): + batch.idle_callback() + + cache.get.assert_called_once_with(batch.keys) + assert future1.result() == b"one" + assert future2.result() == b"two" + + @staticmethod + def test_add_and_idle_and_done_callbacks_w_error(in_context): + error = Exception("spurious error") + cache = mock.Mock() + cache.get.return_value = tasklets.Future() + cache.get.return_value.set_exception(error) + + batch = _cache._GlobalCacheGetBatch(None) + future1 = batch.add(b"foo") + future2 = batch.add(b"bar") + + assert set(batch.todo.keys()) == {b"foo", b"bar"} + assert batch.keys == [b"foo", b"bar"] + + with in_context.new(global_cache=cache).use(): + batch.idle_callback() + + cache.get.assert_called_once_with(batch.keys) + assert future1.exception() is error + assert future2.exception() is error + + @staticmethod + def test_full(): + batch = _cache._GlobalCacheGetBatch(None) + assert batch.full() is False + + +@pytest.mark.usefixtures("in_context") +class Test_global_set: + @staticmethod + @mock.patch("google.cloud.ndb._cache._global_cache") + @mock.patch("google.cloud.ndb._cache._batch") + def test_without_expires(_batch, _global_cache): + batch = _batch.get_batch.return_value + future = _future_result("hi mom!") + batch.add.return_value = future + _global_cache.return_value = mock.Mock( + transient_errors=(), + strict_write=False, + spec=("transient_errors", "strict_write"), + ) + + assert _cache.global_set(b"key", b"value").result() == "hi mom!" + _batch.get_batch.assert_called_once_with(_cache._GlobalCacheSetBatch, {}) + batch.add.assert_called_once_with(b"key", b"value") + + @staticmethod + @mock.patch("google.cloud.ndb.tasklets.sleep") + @mock.patch("google.cloud.ndb._cache._global_cache") + @mock.patch("google.cloud.ndb._cache._batch") + def test_error_strict(_batch, _global_cache, sleep): + class TransientError(Exception): + pass + + sleep.return_value = future_result(None) + batch = _batch.get_batch.return_value + future = _future_exception(TransientError("oops")) + batch.add.return_value = future + _global_cache.return_value = mock.Mock( + transient_errors=(TransientError,), + spec=("transient_errors", "strict_write"), + ) + + with pytest.raises(TransientError): + _cache.global_set(b"key", b"value").result() + + _batch.get_batch.assert_called_with(_cache._GlobalCacheSetBatch, {}) + batch.add.assert_called_with(b"key", b"value") + + @staticmethod + @mock.patch("google.cloud.ndb._cache._global_cache") + @mock.patch("google.cloud.ndb._cache._batch") + def test_error_not_strict_already_warned(_batch, _global_cache): + class TransientError(Exception): + pass + + batch = _batch.get_batch.return_value + error = TransientError("oops") + error._ndb_warning_logged = True + future = _future_exception(error) + batch.add.return_value = future + _global_cache.return_value = mock.Mock( + transient_errors=(TransientError,), + strict_write=False, + spec=("transient_errors", "strict_write"), + ) + + with warnings.catch_warnings(record=True) as logged: + assert _cache.global_set(b"key", b"value").result() is None + assert len(logged) in [0, 1] + + _batch.get_batch.assert_called_once_with(_cache._GlobalCacheSetBatch, {}) + batch.add.assert_called_once_with(b"key", b"value") + + @staticmethod + @mock.patch("google.cloud.ndb._cache._global_cache") + @mock.patch("google.cloud.ndb._cache._batch") + def test_with_expires(_batch, _global_cache): + batch = _batch.get_batch.return_value + future = _future_result("hi mom!") + batch.add.return_value = future + _global_cache.return_value = 
mock.Mock( + transient_errors=(), + strict_write=False, + spec=("transient_errors", "strict_write"), + ) + + future = _cache.global_set(b"key", b"value", expires=5) + assert future.result() == "hi mom!" + _batch.get_batch.assert_called_once_with( + _cache._GlobalCacheSetBatch, {"expires": 5} + ) + batch.add.assert_called_once_with(b"key", b"value") + + +class Test_GlobalCacheSetBatch: + @staticmethod + def test_add_duplicate_key_and_value(): + batch = _cache._GlobalCacheSetBatch({}) + future1 = batch.add(b"foo", b"one") + future2 = batch.add(b"foo", b"one") + assert future1 is future2 + + @staticmethod + def test_add_and_idle_and_done_callbacks(in_context): + cache = mock.Mock(spec=("set",)) + cache.set.return_value = [] + + batch = _cache._GlobalCacheSetBatch({}) + future1 = batch.add(b"foo", b"one") + future2 = batch.add(b"bar", b"two") + + assert batch.expires is None + + with in_context.new(global_cache=cache).use(): + batch.idle_callback() + + cache.set.assert_called_once_with( + {b"foo": b"one", b"bar": b"two"}, expires=None + ) + assert future1.result() is None + assert future2.result() is None + + @staticmethod + def test_add_and_idle_and_done_callbacks_with_duplicate_keys(in_context): + cache = mock.Mock(spec=("set",)) + cache.set.return_value = [] + + batch = _cache._GlobalCacheSetBatch({}) + future1 = batch.add(b"foo", b"one") + future2 = batch.add(b"foo", b"two") + + assert batch.expires is None + + with in_context.new(global_cache=cache).use(): + batch.idle_callback() + + cache.set.assert_called_once_with({b"foo": b"one"}, expires=None) + assert future1.result() is None + with pytest.raises(RuntimeError): + future2.result() + + @staticmethod + def test_add_and_idle_and_done_callbacks_with_expires(in_context): + cache = mock.Mock(spec=("set",)) + cache.set.return_value = [] + + batch = _cache._GlobalCacheSetBatch({"expires": 5}) + future1 = batch.add(b"foo", b"one") + future2 = batch.add(b"bar", b"two") + + assert batch.expires == 5 + + with in_context.new(global_cache=cache).use(): + batch.idle_callback() + + cache.set.assert_called_once_with({b"foo": b"one", b"bar": b"two"}, expires=5) + assert future1.result() is None + assert future2.result() is None + + @staticmethod + def test_add_and_idle_and_done_callbacks_w_error(in_context): + error = Exception("spurious error") + cache = mock.Mock(spec=("set",)) + cache.set.return_value = tasklets.Future() + cache.set.return_value.set_exception(error) + + batch = _cache._GlobalCacheSetBatch({}) + future1 = batch.add(b"foo", b"one") + future2 = batch.add(b"bar", b"two") + + with in_context.new(global_cache=cache).use(): + batch.idle_callback() + + cache.set.assert_called_once_with( + {b"foo": b"one", b"bar": b"two"}, expires=None + ) + assert future1.exception() is error + assert future2.exception() is error + + @staticmethod + def test_done_callbacks_with_results(in_context): + class SpeciousError(Exception): + pass + + cache_call = _future_result( + { + b"foo": "this is a result", + b"bar": SpeciousError("this is also a kind of result"), + } + ) + + batch = _cache._GlobalCacheSetBatch({}) + future1 = batch.add(b"foo", b"one") + future2 = batch.add(b"bar", b"two") + + batch.done_callback(cache_call) + + assert future1.result() == "this is a result" + with pytest.raises(SpeciousError): + assert future2.result() + + +@pytest.mark.usefixtures("in_context") +class Test_global_set_if_not_exists: + @staticmethod + @mock.patch("google.cloud.ndb._cache._global_cache") + @mock.patch("google.cloud.ndb._cache._batch") + def 
test_without_expires(_batch, _global_cache): + batch = _batch.get_batch.return_value + future = _future_result("hi mom!") + batch.add.return_value = future + _global_cache.return_value = mock.Mock( + transient_errors=(), + strict_write=False, + spec=("transient_errors", "strict_write"), + ) + + assert _cache.global_set_if_not_exists(b"key", b"value").result() == "hi mom!" + _batch.get_batch.assert_called_once_with( + _cache._GlobalCacheSetIfNotExistsBatch, {} + ) + batch.add.assert_called_once_with(b"key", b"value") + + @staticmethod + @mock.patch("google.cloud.ndb._cache._global_cache") + @mock.patch("google.cloud.ndb._cache._batch") + def test_transientError(_batch, _global_cache): + class TransientError(Exception): + pass + + batch = _batch.get_batch.return_value + future = _future_exception(TransientError("oops, mom!")) + batch.add.return_value = future + _global_cache.return_value = mock.Mock( + transient_errors=(TransientError,), + strict_write=False, + spec=("transient_errors", "strict_write"), + ) + + assert _cache.global_set_if_not_exists(b"key", b"value").result() is False + _batch.get_batch.assert_called_once_with( + _cache._GlobalCacheSetIfNotExistsBatch, {} + ) + batch.add.assert_called_once_with(b"key", b"value") + + @staticmethod + @mock.patch("google.cloud.ndb._cache._global_cache") + @mock.patch("google.cloud.ndb._cache._batch") + def test_with_expires(_batch, _global_cache): + batch = _batch.get_batch.return_value + future = _future_result("hi mom!") + batch.add.return_value = future + _global_cache.return_value = mock.Mock( + transient_errors=(), + strict_write=False, + spec=("transient_errors", "strict_write"), + ) + + assert ( + _cache.global_set_if_not_exists(b"key", b"value", expires=123).result() + == "hi mom!" + ) + _batch.get_batch.assert_called_once_with( + _cache._GlobalCacheSetIfNotExistsBatch, {"expires": 123} + ) + batch.add.assert_called_once_with(b"key", b"value") + + +class Test_GlobalCacheSetIfNotExistsBatch: + @staticmethod + def test_add_duplicate_key_and_value(): + batch = _cache._GlobalCacheSetIfNotExistsBatch({}) + future1 = batch.add(b"foo", b"one") + future2 = batch.add(b"foo", b"one") + assert not future1.done() + assert future2.result() is False + + @staticmethod + def test_add_and_idle_and_done_callbacks(in_context): + cache = mock.Mock(spec=("set_if_not_exists",)) + cache.set_if_not_exists.return_value = {} + + batch = _cache._GlobalCacheSetIfNotExistsBatch({}) + future1 = batch.add(b"foo", b"one") + future2 = batch.add(b"bar", b"two") + + assert batch.expires is None + + with in_context.new(global_cache=cache).use(): + batch.idle_callback() + + cache.set_if_not_exists.assert_called_once_with( + {b"foo": b"one", b"bar": b"two"}, expires=None + ) + assert future1.result() is None + assert future2.result() is None + + @staticmethod + def test_add_and_idle_and_done_callbacks_with_duplicate_keys(in_context): + cache = mock.Mock(spec=("set_if_not_exists",)) + cache.set_if_not_exists.return_value = {b"foo": True} + + batch = _cache._GlobalCacheSetIfNotExistsBatch({}) + future1 = batch.add(b"foo", b"one") + future2 = batch.add(b"foo", b"two") + + assert batch.expires is None + + with in_context.new(global_cache=cache).use(): + batch.idle_callback() + + cache.set_if_not_exists.assert_called_once_with({b"foo": b"one"}, expires=None) + assert future1.result() is True + assert future2.result() is False + + @staticmethod + def test_add_and_idle_and_done_callbacks_with_expires(in_context): + cache = mock.Mock(spec=("set_if_not_exists",)) + 
cache.set_if_not_exists.return_value = [] + + batch = _cache._GlobalCacheSetIfNotExistsBatch({"expires": 5}) + future1 = batch.add(b"foo", b"one") + future2 = batch.add(b"bar", b"two") + + assert batch.expires == 5 + + with in_context.new(global_cache=cache).use(): + batch.idle_callback() + + cache.set_if_not_exists.assert_called_once_with( + {b"foo": b"one", b"bar": b"two"}, expires=5 + ) + assert future1.result() is None + assert future2.result() is None + + @staticmethod + def test_add_and_idle_and_done_callbacks_w_error(in_context): + error = Exception("spurious error") + cache = mock.Mock(spec=("set_if_not_exists",)) + cache.set_if_not_exists.return_value = tasklets.Future() + cache.set_if_not_exists.return_value.set_exception(error) + + batch = _cache._GlobalCacheSetIfNotExistsBatch({}) + future1 = batch.add(b"foo", b"one") + future2 = batch.add(b"bar", b"two") + + with in_context.new(global_cache=cache).use(): + batch.idle_callback() + + cache.set_if_not_exists.assert_called_once_with( + {b"foo": b"one", b"bar": b"two"}, expires=None + ) + assert future1.exception() is error + assert future2.exception() is error + + @staticmethod + def test_done_callbacks_with_results(in_context): + class SpeciousError(Exception): + pass + + cache_call = _future_result( + { + b"foo": "this is a result", + b"bar": SpeciousError("this is also a kind of result"), + } + ) + + batch = _cache._GlobalCacheSetIfNotExistsBatch({}) + future1 = batch.add(b"foo", b"one") + future2 = batch.add(b"bar", b"two") + + batch.done_callback(cache_call) + + assert future1.result() == "this is a result" + with pytest.raises(SpeciousError): + assert future2.result() + + +@pytest.mark.usefixtures("in_context") +@mock.patch("google.cloud.ndb._cache._global_cache") +@mock.patch("google.cloud.ndb._cache._batch") +def test_global_delete(_batch, _global_cache): + batch = _batch.get_batch.return_value + future = _future_result("hi mom!") + batch.add.return_value = future + _global_cache.return_value = mock.Mock( + transient_errors=(), + strict_write=False, + spec=("transient_errors", "strict_write"), + ) + + assert _cache.global_delete(b"key").result() == "hi mom!" + _batch.get_batch.assert_called_once_with(_cache._GlobalCacheDeleteBatch) + batch.add.assert_called_once_with(b"key") + + +class Test_GlobalCacheDeleteBatch: + @staticmethod + def test_add_and_idle_and_done_callbacks(in_context): + cache = mock.Mock() + + batch = _cache._GlobalCacheDeleteBatch({}) + future1 = batch.add(b"foo") + future2 = batch.add(b"bar") + + with in_context.new(global_cache=cache).use(): + batch.idle_callback() + + cache.delete.assert_called_once_with([b"foo", b"bar"]) + assert future1.result() is None + assert future2.result() is None + + +@pytest.mark.usefixtures("in_context") +@mock.patch("google.cloud.ndb._cache._global_cache") +@mock.patch("google.cloud.ndb._cache._batch") +def test_global_watch(_batch, _global_cache): + batch = _batch.get_batch.return_value + future = _future_result("hi mom!") + batch.add.return_value = future + _global_cache.return_value = mock.Mock( + transient_errors=(), + strict_read=False, + spec=("transient_errors", "strict_read"), + ) + + assert _cache.global_watch(b"key", b"value").result() == "hi mom!" 
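+    # watch() is the observe half of the cache's optimistic-concurrency pair;
+    # a later compare_and_swap only succeeds if the watched entry is untouched
+    # in between. As above, only the batch dispatch is verified here.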
+ _batch.get_batch.assert_called_once_with(_cache._GlobalCacheWatchBatch, {}) + batch.add.assert_called_once_with(b"key", b"value") + + +@pytest.mark.usefixtures("in_context") +class Test_GlobalCacheWatchBatch: + @staticmethod + def test_add_and_idle_and_done_callbacks(in_context): + cache = mock.Mock(spec=("watch",)) + cache.watch.return_value = None + + batch = _cache._GlobalCacheWatchBatch({}) + future1 = batch.add(b"foo", b"one") + future2 = batch.add(b"bar", b"two") + + assert batch.expires is None + + with in_context.new(global_cache=cache).use(): + batch.idle_callback() + + cache.watch.assert_called_once_with({b"foo": b"one", b"bar": b"two"}) + assert future1.result() is None + assert future2.result() is None + + +@pytest.mark.usefixtures("in_context") +@mock.patch("google.cloud.ndb._cache._global_cache") +@mock.patch("google.cloud.ndb._cache._batch") +def test_global_unwatch(_batch, _global_cache): + batch = _batch.get_batch.return_value + future = _future_result("hi mom!") + batch.add.return_value = future + _global_cache.return_value = mock.Mock( + transient_errors=(), + strict_write=False, + spec=("transient_errors", "strict_write"), + ) + + assert _cache.global_unwatch(b"key").result() == "hi mom!" + _batch.get_batch.assert_called_once_with(_cache._GlobalCacheUnwatchBatch, {}) + batch.add.assert_called_once_with(b"key") + + +class Test_GlobalCacheUnwatchBatch: + @staticmethod + def test_add_and_idle_and_done_callbacks(in_context): + cache = mock.Mock() + + batch = _cache._GlobalCacheUnwatchBatch({}) + future1 = batch.add(b"foo") + future2 = batch.add(b"bar") + + with in_context.new(global_cache=cache).use(): + batch.idle_callback() + + cache.unwatch.assert_called_once_with([b"foo", b"bar"]) + assert future1.result() is None + assert future2.result() is None + + +@pytest.mark.usefixtures("in_context") +class Test_global_compare_and_swap: + @staticmethod + @mock.patch("google.cloud.ndb._cache._global_cache") + @mock.patch("google.cloud.ndb._cache._batch") + def test_without_expires(_batch, _global_cache): + batch = _batch.get_batch.return_value + future = _future_result("hi mom!") + batch.add.return_value = future + _global_cache.return_value = mock.Mock( + transient_errors=(), + strict_read=False, + spec=("transient_errors", "strict_read"), + ) + + future = _cache.global_compare_and_swap(b"key", b"value") + assert future.result() == "hi mom!" + _batch.get_batch.assert_called_once_with( + _cache._GlobalCacheCompareAndSwapBatch, {} + ) + batch.add.assert_called_once_with(b"key", b"value") + + @staticmethod + @mock.patch("google.cloud.ndb._cache._global_cache") + @mock.patch("google.cloud.ndb._cache._batch") + def test_with_expires(_batch, _global_cache): + batch = _batch.get_batch.return_value + future = _future_result("hi mom!") + batch.add.return_value = future + _global_cache.return_value = mock.Mock( + transient_errors=(), + strict_read=False, + spec=("transient_errors", "strict_read"), + ) + + future = _cache.global_compare_and_swap(b"key", b"value", expires=5) + assert future.result() == "hi mom!" 
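+        # compare_and_swap is the commit half of the watch/CAS pair; note that
+        # expires travels in the batch options rather than in the add() call.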
+ _batch.get_batch.assert_called_once_with( + _cache._GlobalCacheCompareAndSwapBatch, {"expires": 5} + ) + batch.add.assert_called_once_with(b"key", b"value") + + +class Test_GlobalCacheCompareAndSwapBatch: + @staticmethod + def test_add_and_idle_and_done_callbacks(in_context): + cache = mock.Mock(spec=("compare_and_swap",)) + cache.compare_and_swap.return_value = None + + batch = _cache._GlobalCacheCompareAndSwapBatch({}) + future1 = batch.add(b"foo", b"one") + future2 = batch.add(b"bar", b"two") + + assert batch.expires is None + + with in_context.new(global_cache=cache).use(): + batch.idle_callback() + + cache.compare_and_swap.assert_called_once_with( + {b"foo": b"one", b"bar": b"two"}, expires=None + ) + assert future1.result() is None + assert future2.result() is None + + @staticmethod + def test_add_and_idle_and_done_callbacks_with_expires(in_context): + cache = mock.Mock(spec=("compare_and_swap",)) + cache.compare_and_swap.return_value = None + + batch = _cache._GlobalCacheCompareAndSwapBatch({"expires": 5}) + future1 = batch.add(b"foo", b"one") + future2 = batch.add(b"bar", b"two") + + assert batch.expires == 5 + + with in_context.new(global_cache=cache).use(): + batch.idle_callback() + + cache.compare_and_swap.assert_called_once_with( + {b"foo": b"one", b"bar": b"two"}, expires=5 + ) + assert future1.result() is None + assert future2.result() is None + + +@pytest.mark.usefixtures("in_context") +class Test_global_lock_for_read: + @staticmethod + @mock.patch("google.cloud.ndb._cache.global_set_if_not_exists") + def test_lock_acquired(global_set_if_not_exists): + global_set_if_not_exists.return_value = _future_result(True) + lock = _cache.global_lock_for_read(b"key", None).result() + assert lock.startswith(_cache._LOCKED_FOR_READ) + + @staticmethod + @mock.patch("google.cloud.ndb._cache.global_set_if_not_exists") + def test_lock_not_acquired(global_set_if_not_exists): + global_set_if_not_exists.return_value = _future_result(False) + lock = _cache.global_lock_for_read(b"key", None).result() + assert lock is None + + @staticmethod + @mock.patch("google.cloud.ndb._cache.global_compare_and_swap") + @mock.patch("google.cloud.ndb._cache.global_watch") + def test_recently_written_and_lock_acquired(global_watch, global_compare_and_swap): + global_watch.return_value = _future_result(True) + global_compare_and_swap.return_value = _future_result(True) + lock = _cache.global_lock_for_read(b"key", _cache._LOCKED_FOR_WRITE).result() + assert lock.startswith(_cache._LOCKED_FOR_READ) + + @staticmethod + @mock.patch("google.cloud.ndb._cache.global_compare_and_swap") + @mock.patch("google.cloud.ndb._cache.global_watch") + def test_recently_written_and_lock_not_acquired( + global_watch, global_compare_and_swap + ): + global_watch.return_value = _future_result(True) + global_compare_and_swap.return_value = _future_result(False) + lock = _cache.global_lock_for_read(b"key", _cache._LOCKED_FOR_WRITE).result() + assert lock is None + + +@pytest.mark.usefixtures("in_context") +class Test_global_lock_for_write: + @staticmethod + @mock.patch("google.cloud.ndb._cache.uuid") + @mock.patch("google.cloud.ndb._cache.global_set_if_not_exists") + @mock.patch("google.cloud.ndb._cache._global_get") + @mock.patch("google.cloud.ndb._cache._global_cache") + def test_first_time(_global_cache, _global_get, global_set_if_not_exists, uuid): + uuid.uuid4.return_value = "arandomuuid" + + _global_cache.return_value = mock.Mock( + transient_errors=(), + strict_write=False, + spec=("transient_errors", "strict_write"), + ) + + 
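+        # Write locks are value-encoded: the _LOCKED_FOR_WRITE prefix plus one
+        # ".uuid" suffix per writer, so concurrent writers share the cache
+        # entry and each strips only its own suffix on unlock.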
lock_value = _cache._LOCKED_FOR_WRITE + b".arandomuuid" + _global_get.return_value = _future_result(None) + global_set_if_not_exists.return_value = _future_result(True) + + assert _cache.global_lock_for_write(b"key").result() == b".arandomuuid" + _global_get.assert_called_once_with(b"key") + global_set_if_not_exists.assert_called_once_with(b"key", lock_value, expires=64) + + @staticmethod + @mock.patch("google.cloud.ndb._cache.uuid") + @mock.patch("google.cloud.ndb._cache._global_compare_and_swap") + @mock.patch("google.cloud.ndb._cache._global_watch") + @mock.patch("google.cloud.ndb._cache._global_get") + @mock.patch("google.cloud.ndb._cache._global_cache") + def test_not_first_time_fail_once( + _global_cache, _global_get, _global_watch, _global_compare_and_swap, uuid + ): + uuid.uuid4.return_value = "arandomuuid" + + _global_cache.return_value = mock.Mock( + transient_errors=(), + strict_write=False, + spec=("transient_errors", "strict_write"), + ) + + old_lock_value = _cache._LOCKED_FOR_WRITE + b".whatevs" + new_lock_value = old_lock_value + b".arandomuuid" + _global_get.return_value = _future_result(old_lock_value) + _global_watch.return_value = _future_result(None) + _global_compare_and_swap.side_effect = ( + _future_result(False), + _future_result(True), + ) + assert _cache.global_lock_for_write(b"key").result() == b".arandomuuid" + _global_get.assert_has_calls( + [ + mock.call(b"key"), + mock.call(b"key"), + ] + ) + _global_watch.assert_has_calls( + [ + mock.call(b"key", old_lock_value), + mock.call(b"key", old_lock_value), + ] + ) + _global_compare_and_swap.assert_has_calls( + [ + mock.call(b"key", new_lock_value, expires=64), + mock.call(b"key", new_lock_value, expires=64), + ] + ) + + +@pytest.mark.usefixtures("in_context") +class Test_global_unlock_for_write: + @staticmethod + @mock.patch("google.cloud.ndb._cache.uuid") + @mock.patch("google.cloud.ndb._cache._global_compare_and_swap") + @mock.patch("google.cloud.ndb._cache._global_watch") + @mock.patch("google.cloud.ndb._cache._global_get") + @mock.patch("google.cloud.ndb._cache._global_cache") + def test_last_time( + _global_cache, _global_get, _global_watch, _global_compare_and_swap, uuid + ): + lock = b".arandomuuid" + + _global_cache.return_value = mock.Mock( + transient_errors=(), + strict_write=False, + spec=("transient_errors", "strict_write"), + ) + + lock_value = _cache._LOCKED_FOR_WRITE + lock + _global_get.return_value = _future_result(lock_value) + _global_watch.return_value = _future_result(None) + _global_compare_and_swap.return_value = _future_result(True) + + assert _cache.global_unlock_for_write(b"key", lock).result() is None + _global_get.assert_called_once_with(b"key") + _global_watch.assert_called_once_with(b"key", lock_value) + _global_compare_and_swap.assert_called_once_with(b"key", b"", expires=64) + + @staticmethod + @mock.patch("google.cloud.ndb._cache.uuid") + @mock.patch("google.cloud.ndb._cache._global_compare_and_swap") + @mock.patch("google.cloud.ndb._cache._global_watch") + @mock.patch("google.cloud.ndb._cache._global_get") + @mock.patch("google.cloud.ndb._cache._global_cache") + def test_lock_missing( + _global_cache, _global_get, _global_watch, _global_compare_and_swap, uuid + ): + lock = b".arandomuuid" + + _global_cache.return_value = mock.Mock( + transient_errors=(), + strict_write=False, + spec=("transient_errors", "strict_write"), + ) + + lock_value = _cache._LOCKED_FOR_WRITE + b".adifferentlock" + _global_get.return_value = _future_result(lock_value) + _global_watch.return_value = 
_future_result(None) + _global_compare_and_swap.return_value = _future_result(True) + + with warnings.catch_warnings(record=True) as logged: + assert _cache.global_unlock_for_write(b"key", lock).result() is None + logged = [ + warning for warning in logged if warning.category is RuntimeWarning + ] + assert len(logged) == 1 + + _global_get.assert_called_once_with(b"key") + _global_watch.assert_not_called() + _global_compare_and_swap.assert_not_called() + + @staticmethod + @mock.patch("google.cloud.ndb._cache.uuid") + @mock.patch("google.cloud.ndb._cache.global_set_if_not_exists") + @mock.patch("google.cloud.ndb._cache._global_get") + @mock.patch("google.cloud.ndb._cache._global_cache") + def test_no_value_in_cache( + _global_cache, _global_get, global_set_if_not_exists, uuid + ): + lock = b".arandomuuid" + + _global_cache.return_value = mock.Mock( + transient_errors=(), + strict_write=False, + spec=("transient_errors", "strict_write"), + ) + + _global_get.return_value = _future_result(None) + global_set_if_not_exists.return_value = _future_result(True) + + with warnings.catch_warnings(record=True) as logged: + assert _cache.global_unlock_for_write(b"key", lock).result() is None + logged = [ + warning for warning in logged if warning.category is RuntimeWarning + ] + assert len(logged) == 1 + + _global_get.assert_called_once_with(b"key") + global_set_if_not_exists.assert_not_called() + + @staticmethod + @mock.patch("google.cloud.ndb._cache.uuid") + @mock.patch("google.cloud.ndb._cache._global_compare_and_swap") + @mock.patch("google.cloud.ndb._cache._global_watch") + @mock.patch("google.cloud.ndb._cache._global_get") + @mock.patch("google.cloud.ndb._cache._global_cache") + def test_lock_overwritten( + _global_cache, _global_get, _global_watch, _global_compare_and_swap, uuid + ): + lock = b".arandomuuid" + + _global_cache.return_value = mock.Mock( + transient_errors=(), + strict_write=False, + spec=("transient_errors", "strict_write"), + ) + + lock_value = b"SOMERANDOMVALUE" + _global_get.return_value = _future_result(lock_value) + _global_watch.return_value = _future_result(None) + _global_compare_and_swap.return_value = _future_result(True) + + with warnings.catch_warnings(record=True) as logged: + assert _cache.global_unlock_for_write(b"key", lock).result() is None + logged = [ + warning for warning in logged if warning.category is RuntimeWarning + ] + assert len(logged) == 1 + + _global_get.assert_called_once_with(b"key") + _global_watch.assert_called_once_with(b"key", lock_value) + _global_compare_and_swap.assert_called_once_with(b"key", b"", expires=64) + + @staticmethod + @mock.patch("google.cloud.ndb._cache.uuid") + @mock.patch("google.cloud.ndb._cache._global_watch") + @mock.patch("google.cloud.ndb._cache._global_get") + @mock.patch("google.cloud.ndb._cache._global_cache") + def test_transient_error(_global_cache, _global_get, _global_watch, uuid): + class TransientError(Exception): + pass + + lock = b".arandomuuid" + + _global_cache.return_value = mock.Mock( + transient_errors=(TransientError,), + strict_write=False, + spec=("transient_errors", "strict_write"), + ) + + lock_value = _cache._LOCKED_FOR_WRITE + lock + _global_get.return_value = _future_result(lock_value) + _global_watch.return_value = _future_exception(TransientError()) + + assert _cache.global_unlock_for_write(b"key", lock).result() is None + _global_get.assert_called_once_with(b"key") + _global_watch.assert_called_once_with(b"key", lock_value) + + @staticmethod + @mock.patch("google.cloud.ndb._cache.uuid") + 
@mock.patch("google.cloud.ndb._cache._global_compare_and_swap") + @mock.patch("google.cloud.ndb._cache._global_watch") + @mock.patch("google.cloud.ndb._cache._global_get") + @mock.patch("google.cloud.ndb._cache._global_cache") + def test_not_last_time_fail_once( + _global_cache, _global_get, _global_watch, _global_compare_and_swap, uuid + ): + lock = b".arandomuuid" + + _global_cache.return_value = mock.Mock( + transient_errors=(), + strict_write=False, + spec=("transient_errors", "strict_write"), + ) + + new_lock_value = _cache._LOCKED_FOR_WRITE + b".whatevs" + old_lock_value = new_lock_value + lock + _global_get.return_value = _future_result(old_lock_value) + _global_watch.return_value = _future_result(None) + _global_compare_and_swap.side_effect = ( + _future_result(False), + _future_result(True), + ) + + assert _cache.global_unlock_for_write(b"key", lock).result() is None + _global_get.assert_has_calls( + [ + mock.call(b"key"), + mock.call(b"key"), + ] + ) + _global_watch.assert_has_calls( + [ + mock.call(b"key", old_lock_value), + mock.call(b"key", old_lock_value), + ] + ) + _global_compare_and_swap.assert_has_calls( + [ + mock.call(b"key", new_lock_value, expires=64), + mock.call(b"key", new_lock_value, expires=64), + ] + ) + + +def test_is_locked_value(): + assert _cache.is_locked_value(_cache._LOCKED_FOR_READ) + assert _cache.is_locked_value(_cache._LOCKED_FOR_WRITE + b"whatever") + assert not _cache.is_locked_value(b"") + assert not _cache.is_locked_value(b"new db, who dis?") + assert not _cache.is_locked_value(None) + + +def test_global_cache_key(): + key = mock.Mock() + key.to_protobuf.return_value._pb.SerializeToString.return_value = b"himom!" + assert _cache.global_cache_key(key) == _cache._PREFIX + b"himom!" + key.to_protobuf.assert_called_once_with() + key.to_protobuf.return_value._pb.SerializeToString.assert_called_once_with() + + +def _future_result(result): + future = tasklets.Future() + future.set_result(result) + return future + + +def _future_exception(error): + future = tasklets.Future() + future.set_exception(error) + return future diff --git a/packages/google-cloud-ndb/tests/unit/test__datastore_api.py b/packages/google-cloud-ndb/tests/unit/test__datastore_api.py new file mode 100644 index 000000000000..0db656a32d26 --- /dev/null +++ b/packages/google-cloud-ndb/tests/unit/test__datastore_api.py @@ -0,0 +1,1515 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from unittest import mock + +import grpc +import pytest + +from google.api_core import client_info +from google.api_core import exceptions as core_exceptions +from google.cloud.datastore import entity +from google.cloud.datastore import helpers +from google.cloud.datastore import key as ds_key_module +from google.cloud.datastore_v1.types import datastore as datastore_pb2 +from google.cloud.datastore_v1.types import entity as entity_pb2 +from google.cloud.ndb import _batch +from google.cloud.ndb import _cache +from google.cloud.ndb import context as context_module +from google.cloud.ndb import _datastore_api as _api +from google.cloud.ndb import key as key_module +from google.cloud.ndb import model +from google.cloud.ndb import _options +from google.cloud.ndb import tasklets +from google.cloud.ndb import __version__ + +from . import utils + + +def future_result(result): + future = tasklets.Future() + future.set_result(result) + return future + + +class TestStub: + @staticmethod + def test_it(): + client = mock.Mock( + _credentials="creds", + secure=True, + host="thehost", + stub=object(), + spec=("_credentials", "secure", "host", "stub"), + client_info=client_info.ClientInfo( + user_agent="google-cloud-ndb/{}".format(__version__) + ), + ) + context = context_module.Context(client) + with context.use(): + assert _api.stub() is client.stub + + +class Test_make_call: + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_api._retry") + @mock.patch("google.cloud.ndb._datastore_api.stub") + def test_defaults(stub, _retry): + api = stub.return_value + future = tasklets.Future() + api.foo.future.return_value = future + _retry.retry_async.return_value = mock.Mock(return_value=future) + future.set_result("bar") + + request = object() + assert _api.make_call("foo", request).result() == "bar" + _retry.retry_async.assert_called_once() + tasklet = _retry.retry_async.call_args[0][0] + assert tasklet().result() == "bar" + retries = _retry.retry_async.call_args[1]["retries"] + assert retries is _retry._DEFAULT_RETRIES + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_api._retry") + @mock.patch("google.cloud.ndb._datastore_api.stub") + def test_explicit_retries(stub, _retry): + api = stub.return_value + future = tasklets.Future() + api.foo.future.return_value = future + _retry.retry_async.return_value = mock.Mock(return_value=future) + future.set_result("bar") + + request = object() + assert _api.make_call("foo", request, retries=4).result() == "bar" + _retry.retry_async.assert_called_once() + tasklet = _retry.retry_async.call_args[0][0] + assert tasklet().result() == "bar" + retries = _retry.retry_async.call_args[1]["retries"] + assert retries == 4 + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_api._retry") + @mock.patch("google.cloud.ndb._datastore_api.stub") + def test_no_retries(stub, _retry): + api = stub.return_value + future = tasklets.Future() + api.foo.future.return_value = future + _retry.retry_async.return_value = mock.Mock(return_value=future) + future.set_result("bar") + + request = object() + assert _api.make_call("foo", request, retries=0).result() == "bar" + _retry.retry_async.assert_not_called() + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_api._retry") + @mock.patch("google.cloud.ndb._datastore_api.stub") + def test_explicit_timeout(stub, _retry): + api = stub.return_value + future 
= tasklets.Future() + api.foo.future.return_value = future + _retry.retry_async.return_value = mock.Mock(return_value=future) + future.set_result("bar") + + request = object() + metadata = object() + call = _api.make_call("foo", request, retries=0, timeout=20, metadata=metadata) + assert call.result() == "bar" + api.foo.future.assert_called_once_with(request, timeout=20, metadata=metadata) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_api.stub") + def test_grpc_error(stub): + api = stub.return_value + future = tasklets.Future() + api.foo.future.return_value = future + + class DummyError(grpc.Call, Exception): + def code(self): + return grpc.StatusCode.UNAVAILABLE + + def details(self): + return "Where is the devil in?" + + try: + raise DummyError("Have to raise in order to get traceback") + except Exception as error: + future.set_exception(error) + + request = object() + with pytest.raises(core_exceptions.ServiceUnavailable): + _api.make_call("foo", request, retries=0).result() + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_api.stub") + def test_other_error(stub): + api = stub.return_value + future = tasklets.Future() + api.foo.future.return_value = future + + class DummyException(Exception): + pass + + try: + raise DummyException("Have to raise in order to get traceback") + except Exception as error: + future.set_exception(error) + + request = object() + with pytest.raises(DummyException): + _api.make_call("foo", request, retries=0).result() + + +def _mock_key(key_str): + key = mock.Mock(kind="SomeKind", spec=("to_protobuf", "kind")) + key.to_protobuf.return_value = protobuf = mock.Mock( + _pb=mock.Mock(spec=("SerializeToString",)) + ) + protobuf._pb.SerializeToString.return_value = key_str + return key + + +class Test_lookup: + @staticmethod + def test_it(context): + eventloop = mock.Mock(spec=("add_idle", "run")) + with context.new(eventloop=eventloop).use() as context: + _api.lookup(_mock_key("foo"), _options.ReadOptions()) + _api.lookup(_mock_key("foo"), _options.ReadOptions()) + _api.lookup(_mock_key("bar"), _options.ReadOptions()) + + batch = context.batches[_api._LookupBatch][()] + assert len(batch.todo["foo"]) == 2 + assert len(batch.todo["bar"]) == 1 + assert context.eventloop.add_idle.call_count == 1 + + @staticmethod + def test_it_with_options(context): + eventloop = mock.Mock(spec=("add_idle", "run")) + with context.new(eventloop=eventloop).use() as context: + _api.lookup(_mock_key("foo"), _options.ReadOptions()) + _api.lookup( + _mock_key("foo"), + _options.ReadOptions(read_consistency=_api.EVENTUAL), + ) + _api.lookup(_mock_key("bar"), _options.ReadOptions()) + + batches = context.batches[_api._LookupBatch] + batch1 = batches[()] + assert len(batch1.todo["foo"]) == 1 + assert len(batch1.todo["bar"]) == 1 + + batch2 = batches[(("read_consistency", _api.EVENTUAL),)] + assert len(batch2.todo) == 1 + assert len(batch2.todo["foo"]) == 1 + + add_idle = context.eventloop.add_idle + assert add_idle.call_count == 2 + + @staticmethod + def test_it_with_transaction(context): + eventloop = mock.Mock(spec=("add_idle", "run")) + new_context = context.new(eventloop=eventloop, transaction=b"tx123") + with new_context.use(): + new_context._use_global_cache = mock.Mock( + side_effect=Exception("Shouldn't call _use_global_cache") + ) + _api.lookup(_mock_key("foo"), _options.ReadOptions()) + _api.lookup(_mock_key("foo"), _options.ReadOptions()) + _api.lookup(_mock_key("bar"), 
_options.ReadOptions()) + + batch = new_context.batches[_api._LookupBatch][(("transaction", b"tx123"),)] + assert len(batch.todo["foo"]) == 2 + assert len(batch.todo["bar"]) == 1 + assert new_context.eventloop.add_idle.call_count == 1 + + @staticmethod + def test_it_no_global_cache_or_datastore(in_context): + with pytest.raises(TypeError): + _api.lookup( + _mock_key("foo"), _options.ReadOptions(use_datastore=False) + ).result() + + +class Test_lookup_WithGlobalCache: + @staticmethod + @mock.patch("google.cloud.ndb._datastore_api._LookupBatch") + def test_cache_miss(_LookupBatch, global_cache): + class SomeKind(model.Model): + pass + + key = key_module.Key("SomeKind", 1) + cache_key = _cache.global_cache_key(key._key) + + entity = SomeKind(key=key) + entity_pb = model._entity_to_protobuf(entity) + cache_value = entity_pb._pb.SerializeToString() + + batch = _LookupBatch.return_value + batch.add.return_value = future_result(entity_pb) + + future = _api.lookup(key._key, _options.ReadOptions()) + assert future.result() == entity_pb + + assert global_cache.get([cache_key]) == [cache_value] + + @staticmethod + @mock.patch("google.cloud.ndb._datastore_api._LookupBatch") + def test_cache_miss_followed_by_lock_acquisition_failure( + _LookupBatch, global_cache + ): + class SomeKind(model.Model): + pass + + key = key_module.Key("SomeKind", 1) + cache_key = _cache.global_cache_key(key._key) + + entity = SomeKind(key=key) + entity_pb = model._entity_to_protobuf(entity) + + batch = _LookupBatch.return_value + batch.add.return_value = future_result(entity_pb) + + global_cache.set_if_not_exists = mock.Mock( + return_value=future_result({cache_key: False}) + ) + + future = _api.lookup(key._key, _options.ReadOptions()) + assert future.result() == entity_pb + + assert global_cache.get([cache_key]) == [None] + + @staticmethod + @mock.patch("google.cloud.ndb._datastore_api._LookupBatch") + def test_cache_miss_no_datastore(_LookupBatch, global_cache): + class SomeKind(model.Model): + pass + + key = key_module.Key("SomeKind", 1) + cache_key = _cache.global_cache_key(key._key) + + batch = _LookupBatch.return_value + batch.add.side_effect = Exception("Shouldn't use Datastore") + + future = _api.lookup(key._key, _options.ReadOptions(use_datastore=False)) + assert future.result() is _api._NOT_FOUND + + assert global_cache.get([cache_key]) == [None] + + @staticmethod + @mock.patch("google.cloud.ndb._datastore_api._LookupBatch") + def test_cache_hit(_LookupBatch, global_cache): + class SomeKind(model.Model): + pass + + key = key_module.Key("SomeKind", 1) + cache_key = _cache.global_cache_key(key._key) + + entity = SomeKind(key=key) + entity_pb = model._entity_to_protobuf(entity) + cache_value = entity_pb._pb.SerializeToString() + + global_cache.set({cache_key: cache_value}) + + batch = _LookupBatch.return_value + batch.add.side_effect = Exception("Shouldn't get called.") + + future = _api.lookup(key._key, _options.ReadOptions()) + assert future.result() == entity_pb + + @staticmethod + @mock.patch("google.cloud.ndb._datastore_api._LookupBatch") + def test_cache_locked(_LookupBatch, global_cache): + class SomeKind(model.Model): + pass + + key = key_module.Key("SomeKind", 1) + cache_key = _cache.global_cache_key(key._key) + + entity = SomeKind(key=key) + entity_pb = model._entity_to_protobuf(entity) + + global_cache.set({cache_key: _cache._LOCKED_FOR_READ}) + + batch = _LookupBatch.return_value + batch.add.return_value = future_result(entity_pb) + + future = _api.lookup(key._key, _options.ReadOptions()) + assert 
future.result() == entity_pb + + assert global_cache.get([cache_key]) == [_cache._LOCKED_FOR_READ] + + @staticmethod + @mock.patch("google.cloud.ndb._datastore_api._LookupBatch") + def test_cache_not_found(_LookupBatch, global_cache): + class SomeKind(model.Model): + pass + + key = key_module.Key("SomeKind", 1) + cache_key = _cache.global_cache_key(key._key) + + batch = _LookupBatch.return_value + batch.add.return_value = future_result(_api._NOT_FOUND) + + future = _api.lookup(key._key, _options.ReadOptions()) + assert future.result() is _api._NOT_FOUND + + assert global_cache.get([cache_key])[0].startswith(_cache._LOCKED_FOR_READ) + assert len(global_cache._watch_keys) == 0 + + +class Test_LookupBatch: + @staticmethod + @mock.patch("google.cloud.ndb._datastore_api.entity_pb2") + @mock.patch("google.cloud.ndb._datastore_api._datastore_lookup") + def test_idle_callback(_datastore_lookup, entity_pb2, context): + class MockKeyPb: + def __init__(self, key=None, parent=None): + self.key = key + self.parent = parent + + def ParseFromString(self, key): + self.key = key + self.parent.key = key + + class MockKey: + def __init__(self, key=None): + self.key = key + self._pb = MockKeyPb(key, self) + + rpc = tasklets.Future("_datastore_lookup") + _datastore_lookup.return_value = rpc + + entity_pb2.Key = MockKey + eventloop = mock.Mock(spec=("queue_rpc", "run")) + with context.new(eventloop=eventloop).use() as context: + batch = _api._LookupBatch(_options.ReadOptions()) + batch.lookup_callback = mock.Mock() + batch.todo.update({"foo": ["one", "two"], "bar": ["three"]}) + batch.idle_callback() + + called_with = _datastore_lookup.call_args[0] + called_with_keys = set((mock_key.key for mock_key in called_with[0])) + assert called_with_keys == set(["foo", "bar"]) + called_with_options = called_with[1] + assert called_with_options == datastore_pb2.ReadOptions() + + rpc.set_result(None) + batch.lookup_callback.assert_called_once_with(rpc) + + @staticmethod + def test_lookup_callback_exception(): + future1, future2, future3 = (tasklets.Future() for _ in range(3)) + batch = _api._LookupBatch(_options.ReadOptions()) + batch.todo.update({"foo": [future1, future2], "bar": [future3]}) + error = Exception("Spurious error.") + + rpc = tasklets.Future() + rpc.set_exception(error) + batch.lookup_callback(rpc) + + assert future1.exception() is error + assert future2.exception() is error + + @staticmethod + def test_found(): + def key_pb(key): + mock_key = mock.Mock(_pb=mock.Mock(spec=("SerializeToString",))) + mock_key._pb.SerializeToString.return_value = key + return mock_key + + future1, future2, future3 = (tasklets.Future() for _ in range(3)) + batch = _api._LookupBatch(_options.ReadOptions()) + batch.todo.update({"foo": [future1, future2], "bar": [future3]}) + + entity1 = mock.Mock(key=key_pb("foo"), spec=("key",)) + entity2 = mock.Mock(key=key_pb("bar"), spec=("key",)) + response = mock.Mock( + found=[ + mock.Mock(entity=entity1, spec=("entity",)), + mock.Mock(entity=entity2, spec=("entity",)), + ], + missing=[], + deferred=[], + spec=("found", "missing", "deferred"), + ) + + rpc = tasklets.Future() + rpc.set_result(response) + batch.lookup_callback(rpc) + + assert future1.result() is entity1 + assert future2.result() is entity1 + assert future3.result() is entity2 + + @staticmethod + def test_missing(): + def key_pb(key): + mock_key = mock.Mock(_pb=mock.Mock(spec=("SerializeToString",))) + mock_key._pb.SerializeToString.return_value = key + return mock_key + + future1, future2, future3 = (tasklets.Future() for _ 
in range(3)) + batch = _api._LookupBatch(_options.ReadOptions()) + batch.todo.update({"foo": [future1, future2], "bar": [future3]}) + + entity1 = mock.Mock(key=key_pb("foo"), spec=("key",)) + entity2 = mock.Mock(key=key_pb("bar"), spec=("key",)) + response = mock.Mock( + missing=[ + mock.Mock(entity=entity1, spec=("entity",)), + mock.Mock(entity=entity2, spec=("entity",)), + ], + found=[], + deferred=[], + spec=("found", "missing", "deferred"), + ) + + rpc = tasklets.Future() + rpc.set_result(response) + batch.lookup_callback(rpc) + + assert future1.result() is _api._NOT_FOUND + assert future2.result() is _api._NOT_FOUND + assert future3.result() is _api._NOT_FOUND + + @staticmethod + def test_deferred(context): + def key_pb(key): + mock_key = mock.Mock(_pb=mock.Mock(spec=("SerializeToString",))) + mock_key._pb.SerializeToString.return_value = key + return mock_key + + eventloop = mock.Mock(spec=("add_idle", "run")) + with context.new(eventloop=eventloop).use() as context: + future1, future2, future3 = (tasklets.Future() for _ in range(3)) + batch = _api._LookupBatch(_options.ReadOptions()) + batch.todo.update({"foo": [future1, future2], "bar": [future3]}) + + response = mock.Mock( + missing=[], + found=[], + deferred=[key_pb("foo"), key_pb("bar")], + spec=("found", "missing", "deferred"), + ) + + rpc = tasklets.Future() + rpc.set_result(response) + batch.lookup_callback(rpc) + + assert future1.running() + assert future2.running() + assert future3.running() + + next_batch = context.batches[_api._LookupBatch][()] + assert next_batch.todo == batch.todo and next_batch is not batch + assert context.eventloop.add_idle.call_count == 1 + + @staticmethod + def test_found_missing_deferred(context): + def key_pb(key): + mock_key = mock.Mock(_pb=mock.Mock(spec=("SerializeToString",))) + mock_key._pb.SerializeToString.return_value = key + return mock_key + + eventloop = mock.Mock(spec=("add_idle", "run")) + with context.new(eventloop=eventloop).use() as context: + future1, future2, future3 = (tasklets.Future() for _ in range(3)) + batch = _api._LookupBatch(_options.ReadOptions()) + batch.todo.update({"foo": [future1], "bar": [future2], "baz": [future3]}) + + entity1 = mock.Mock(key=key_pb("foo"), spec=("key",)) + entity2 = mock.Mock(key=key_pb("bar"), spec=("key",)) + response = mock.Mock( + found=[mock.Mock(entity=entity1, spec=("entity",))], + missing=[mock.Mock(entity=entity2, spec=("entity",))], + deferred=[key_pb("baz")], + spec=("found", "missing", "deferred"), + ) + + rpc = tasklets.Future() + rpc.set_result(response) + batch.lookup_callback(rpc) + + assert future1.result() is entity1 + assert future2.result() is _api._NOT_FOUND + assert future3.running() + + next_batch = context.batches[_api._LookupBatch][()] + assert next_batch.todo == {"baz": [future3]} + assert context.eventloop.add_idle.call_count == 1 + + +@mock.patch("google.cloud.ndb._datastore_api.datastore_pb2") +def test__datastore_lookup(datastore_pb2, context): + client = mock.Mock( + project="theproject", + database="testdb", + stub=mock.Mock(spec=("lookup",)), + spec=("project", "database", "stub"), + ) + with context.new(client=client).use() as context: + client.stub.lookup = lookup = mock.Mock(spec=("future",)) + future = tasklets.Future() + future.set_result("response") + lookup.future.return_value = future + datastore_pb2.LookupRequest.return_value.project_id = "theproject" + datastore_pb2.LookupRequest.return_value.database_id = "testdb" + assert _api._datastore_lookup(["foo", "bar"], None).result() == "response" + + 
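# The lookup request must carry x-goog-request-params routing for both project and database. +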
datastore_pb2.LookupRequest.assert_called_once_with( + project_id="theproject", + database_id="testdb", + keys=["foo", "bar"], + read_options=None, + ) + client.stub.lookup.future.assert_called_once_with( + datastore_pb2.LookupRequest.return_value, + timeout=_api._DEFAULT_TIMEOUT, + metadata=( + ("x-goog-request-params", "project_id=theproject&database_id=testdb"), + ), + ) + + +class Test_get_read_options: + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_no_args_no_transaction(): + assert ( + _api.get_read_options(_options.ReadOptions()) == datastore_pb2.ReadOptions() + ) + + @staticmethod + def test_no_args_transaction(context): + with context.new(transaction=b"txfoo").use(): + options = _api.get_read_options(_options.ReadOptions()) + assert options == datastore_pb2.ReadOptions(transaction=b"txfoo") + + @staticmethod + def test_args_override_transaction(context): + with context.new(transaction=b"txfoo").use(): + options = _api.get_read_options(_options.ReadOptions(transaction=b"txbar")) + assert options == datastore_pb2.ReadOptions(transaction=b"txbar") + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_eventually_consistent(): + options = _api.get_read_options( + _options.ReadOptions(read_consistency=_api.EVENTUAL) + ) + assert options == datastore_pb2.ReadOptions( + read_consistency=datastore_pb2.ReadOptions.ReadConsistency.EVENTUAL + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_eventually_consistent_with_transaction(): + with pytest.raises(ValueError): + _api.get_read_options( + _options.ReadOptions( + read_consistency=_api.EVENTUAL, transaction=b"txfoo" + ) + ) + + +class Test_put: + @staticmethod + @mock.patch("google.cloud.ndb._datastore_api.datastore_pb2") + def test_no_transaction(datastore_pb2, in_context): + class Mutation: + def __init__(self, upsert=None): + self.upsert = upsert + + def __eq__(self, other): + return self.upsert == other.upsert + + def MockEntity(*path): + key = ds_key_module.Key(*path, project="testing") + return entity.Entity(key=key) + + datastore_pb2.Mutation = Mutation + + entity1 = MockEntity("a", "1") + _api.put(entity1, _options.Options()) + + entity2 = MockEntity("a") + _api.put(entity2, _options.Options()) + + entity3 = MockEntity("b") + _api.put(entity3, _options.Options()) + + batch = in_context.batches[_api._NonTransactionalCommitBatch][()] + assert batch.mutations == [ + Mutation(upsert=helpers.entity_to_protobuf(entity1)), + Mutation(upsert=helpers.entity_to_protobuf(entity2)), + Mutation(upsert=helpers.entity_to_protobuf(entity3)), + ] + + @staticmethod + @mock.patch("google.cloud.ndb._datastore_api.datastore_pb2") + def test_w_transaction(datastore_pb2, in_context): + class Mutation: + def __init__(self, upsert=None): + self.upsert = upsert + + def __eq__(self, other): + return self.upsert == other.upsert + + def MockEntity(*path): + key = ds_key_module.Key(*path, project="testing") + return entity.Entity(key=key) + + with in_context.new(transaction=b"123").use() as context: + datastore_pb2.Mutation = Mutation + + entity1 = MockEntity("a", "1") + _api.put(entity1, _options.Options()) + + entity2 = MockEntity("a") + _api.put(entity2, _options.Options()) + + entity3 = MockEntity("b") + _api.put(entity3, _options.Options()) + + batch = context.commit_batches[b"123"] + assert batch.mutations == [ + Mutation(upsert=helpers.entity_to_protobuf(entity1)), + Mutation(upsert=helpers.entity_to_protobuf(entity2)), + Mutation(upsert=helpers.entity_to_protobuf(entity3)), + ] + assert 
batch.transaction == b"123" + assert batch.incomplete_mutations == [ + Mutation(upsert=helpers.entity_to_protobuf(entity2)), + Mutation(upsert=helpers.entity_to_protobuf(entity3)), + ] + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_no_datastore_or_global_cache(): + def MockEntity(*path): + key = ds_key_module.Key(*path, project="testing") + return entity.Entity(key=key) + + mock_entity = MockEntity("what", "ever") + with pytest.raises(TypeError): + _api.put(mock_entity, _options.Options(use_datastore=False)).result() + + +class Test_put_WithGlobalCache: + @staticmethod + @mock.patch("google.cloud.ndb._datastore_api._NonTransactionalCommitBatch") + def test_no_key_returned(Batch, global_cache): + class SomeKind(model.Model): + pass + + key = key_module.Key("SomeKind", 1) + cache_key = _cache.global_cache_key(key._key) + + entity = SomeKind(key=key) + batch = Batch.return_value + batch.put.return_value = future_result(None) + + future = _api.put(model._entity_to_ds_entity(entity), _options.Options()) + assert future.result() is None + + assert not global_cache.get([cache_key])[0] + + @staticmethod + @mock.patch("google.cloud.ndb._datastore_api._NonTransactionalCommitBatch") + def test_key_returned(Batch, global_cache): + class SomeKind(model.Model): + pass + + key = key_module.Key("SomeKind", 1) + key_pb = key._key.to_protobuf() + cache_key = _cache.global_cache_key(key._key) + + entity = SomeKind(key=key) + batch = Batch.return_value + batch.put.return_value = future_result(key_pb) + + future = _api.put(model._entity_to_ds_entity(entity), _options.Options()) + assert future.result() == key._key + + assert not global_cache.get([cache_key])[0] + + @staticmethod + @mock.patch("google.cloud.ndb._datastore_api._NonTransactionalCommitBatch") + def test_w_transaction(Batch, global_cache): + class SomeKind(model.Model): + pass + + context = context_module.get_context() + callbacks = [] + with context.new( + transaction=b"abc123", transaction_complete_callbacks=callbacks + ).use(): + key = key_module.Key("SomeKind", 1) + cache_key = _cache.global_cache_key(key._key) + + entity = SomeKind(key=key) + batch = Batch.return_value + batch.put.return_value = future_result(None) + + future = _api.put(model._entity_to_ds_entity(entity), _options.Options()) + assert future.result() is None + + assert cache_key in global_cache.cache # lock + for callback in callbacks: + callback() + + # lock removed by callback + assert not global_cache.get([cache_key])[0] + + @staticmethod + @mock.patch("google.cloud.ndb._datastore_api._NonTransactionalCommitBatch") + def test_no_datastore(Batch, global_cache): + class SomeKind(model.Model): + pass + + key = key_module.Key("SomeKind", 1) + cache_key = _cache.global_cache_key(key._key) + + entity = SomeKind(key=key) + cache_value = model._entity_to_protobuf(entity)._pb.SerializeToString() + + batch = Batch.return_value + batch.put.return_value = future_result(None) + + future = _api.put( + model._entity_to_ds_entity(entity), + _options.Options(use_datastore=False), + ) + assert future.result() is None + + assert global_cache.get([cache_key]) == [cache_value] + + @staticmethod + @mock.patch("google.cloud.ndb._datastore_api._NonTransactionalCommitBatch") + def test_no_datastore_incomplete_key(Batch, global_cache): + class SomeKind(model.Model): + pass + + key = key_module.Key("SomeKind", None) + entity = SomeKind(key=key) + future = _api.put( + model._entity_to_ds_entity(entity), + _options.Options(use_datastore=False), + ) + with pytest.raises(TypeError): 
+ future.result() + + +class Test_delete: + @staticmethod + @mock.patch("google.cloud.ndb._datastore_api.datastore_pb2") + def test_no_transaction(datastore_pb2, in_context): + class Mutation: + def __init__(self, delete=None): + self.delete = delete + + def __eq__(self, other): + return self.delete == other.delete + + datastore_pb2.Mutation = Mutation + + key1 = key_module.Key("SomeKind", 1)._key + key2 = key_module.Key("SomeKind", 2)._key + key3 = key_module.Key("SomeKind", 3)._key + _api.delete(key1, _options.Options()) + _api.delete(key2, _options.Options()) + _api.delete(key3, _options.Options()) + + batch = in_context.batches[_api._NonTransactionalCommitBatch][()] + assert batch.mutations == [ + Mutation(delete=key1.to_protobuf()), + Mutation(delete=key2.to_protobuf()), + Mutation(delete=key3.to_protobuf()), + ] + + @staticmethod + @mock.patch("google.cloud.ndb._datastore_api.datastore_pb2") + def test_w_transaction(datastore_pb2, in_context): + class Mutation: + def __init__(self, delete=None): + self.delete = delete + + def __eq__(self, other): + return self.delete == other.delete + + with in_context.new(transaction=b"tx123").use() as context: + datastore_pb2.Mutation = Mutation + + key1 = key_module.Key("SomeKind", 1)._key + key2 = key_module.Key("SomeKind", 2)._key + key3 = key_module.Key("SomeKind", 3)._key + assert _api.delete(key1, _options.Options()).result() is None + assert _api.delete(key2, _options.Options()).result() is None + assert _api.delete(key3, _options.Options()).result() is None + + batch = context.commit_batches[b"tx123"] + assert batch.mutations == [ + Mutation(delete=key1.to_protobuf()), + Mutation(delete=key2.to_protobuf()), + Mutation(delete=key3.to_protobuf()), + ] + + +class Test_delete_WithGlobalCache: + @staticmethod + @mock.patch("google.cloud.ndb._datastore_api._NonTransactionalCommitBatch") + def test_cache_enabled(Batch, global_cache): + key = key_module.Key("SomeKind", 1) + cache_key = _cache.global_cache_key(key._key) + + batch = Batch.return_value + batch.delete.return_value = future_result(None) + + future = _api.delete(key._key, _options.Options()) + assert future.result() is None + + assert not global_cache.get([cache_key])[0] + + @staticmethod + @mock.patch("google.cloud.ndb._datastore_api._NonTransactionalCommitBatch") + def test_w_transaction(Batch, global_cache): + context = context_module.get_context() + callbacks = [] + with context.new( + transaction=b"abc123", transaction_complete_callbacks=callbacks + ).use(): + key = key_module.Key("SomeKind", 1) + cache_key = _cache.global_cache_key(key._key) + + batch = Batch.return_value + batch.delete.return_value = future_result(None) + + future = _api.delete(key._key, _options.Options()) + assert future.result() is None + + assert cache_key in global_cache.cache # lock + for callback in callbacks: + callback() + + # lock removed by callback + assert not global_cache.get([cache_key])[0] + + @staticmethod + @mock.patch("google.cloud.ndb._datastore_api._NonTransactionalCommitBatch") + def test_without_datastore(Batch, global_cache): + key = key_module.Key("SomeKind", 1) + cache_key = _cache.global_cache_key(key._key) + global_cache.set({cache_key: b"foo"}) + + batch = Batch.return_value + batch.delete.side_effect = Exception("Shouldn't use Datastore") + + future = _api.delete(key._key, _options.Options(use_datastore=False)) + assert future.result() is None + + assert global_cache.get([cache_key]) == [None] + + @staticmethod + 
@mock.patch("google.cloud.ndb._datastore_api._NonTransactionalCommitBatch") + def test_cache_disabled(Batch, global_cache): + key = key_module.Key("SomeKind", 1) + cache_key = _cache.global_cache_key(key._key) + + batch = Batch.return_value + batch.delete.return_value = future_result(None) + + future = _api.delete(key._key, _options.Options(use_global_cache=False)) + assert future.result() is None + + assert global_cache.get([cache_key]) == [None] + + +class Test_NonTransactionalCommitBatch: + @staticmethod + @mock.patch("google.cloud.ndb._datastore_api._process_commit") + @mock.patch("google.cloud.ndb._datastore_api._datastore_commit") + def test_idle_callback(_datastore_commit, _process_commit, context): + eventloop = mock.Mock(spec=("queue_rpc", "run")) + + rpc = tasklets.Future("_datastore_commit") + _datastore_commit.return_value = rpc + + with context.new(eventloop=eventloop).use() as context: + mutation1, mutation2 = object(), object() + batch = _api._NonTransactionalCommitBatch(_options.Options()) + batch.mutations = [mutation1, mutation2] + batch.idle_callback() + + _datastore_commit.assert_called_once_with( + [mutation1, mutation2], None, retries=None, timeout=None + ) + rpc.set_result(None) + _process_commit.assert_called_once_with(rpc, batch.futures) + + +@mock.patch("google.cloud.ndb._datastore_api._get_commit_batch") +def test_prepare_to_commit(get_commit_batch): + _api.prepare_to_commit(b"123") + get_commit_batch.assert_called_once_with(b"123", _options.Options()) + batch = get_commit_batch.return_value + assert batch.preparing_to_commit is True + + +@mock.patch("google.cloud.ndb._datastore_api._get_commit_batch") +def test_commit(get_commit_batch): + _api.commit(b"123") + get_commit_batch.assert_called_once_with(b"123", _options.Options()) + get_commit_batch.return_value.commit.assert_called_once_with( + retries=None, timeout=None + ) + + +class Test_get_commit_batch: + @staticmethod + def test_create_batch(in_context): + batch = _api._get_commit_batch(b"123", _options.Options()) + assert isinstance(batch, _api._TransactionalCommitBatch) + assert in_context.commit_batches[b"123"] is batch + assert batch.transaction == b"123" + assert _api._get_commit_batch(b"123", _options.Options()) is batch + assert _api._get_commit_batch(b"234", _options.Options()) is not batch + + @staticmethod + def test_bad_option(): + with pytest.raises(NotImplementedError): + _api._get_commit_batch(b"123", _options.Options(retries=5)) + + +class Test__TransactionalCommitBatch: + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_idle_callback_nothing_to_do(): + batch = _api._TransactionalCommitBatch(b"123", _options.Options()) + batch.idle_callback() + assert not batch.allocating_ids + + @staticmethod + @mock.patch("google.cloud.ndb._datastore_api._datastore_allocate_ids") + def test_idle_callback_success(datastore_allocate_ids, in_context): + def Mutation(): + path = [entity_pb2.Key.PathElement(kind="SomeKind")] + return datastore_pb2.Mutation( + upsert=entity_pb2.Entity(key=entity_pb2.Key(path=path)) + ) + + mutation1, mutation2 = Mutation(), Mutation() + batch = _api._TransactionalCommitBatch(b"123", _options.Options()) + batch.incomplete_mutations = [mutation1, mutation2] + future1, future2 = tasklets.Future(), tasklets.Future() + batch.incomplete_futures = [future1, future2] + + rpc = tasklets.Future("_datastore_allocate_ids") + datastore_allocate_ids.return_value = rpc + + eventloop = mock.Mock(spec=("queue_rpc", "run")) + with in_context.new(eventloop=eventloop).use(): + 
batch.idle_callback() + + rpc.set_result( + mock.Mock( + keys=[ + entity_pb2.Key( + path=[entity_pb2.Key.PathElement(kind="SomeKind", id=1)] + ), + entity_pb2.Key( + path=[entity_pb2.Key.PathElement(kind="SomeKind", id=2)] + ), + ] + ) + ) + + allocating_ids = batch.allocating_ids[0] + assert future1.result().path[0].id == 1 + assert mutation1.upsert.key.path[0].id == 1 + assert future2.result().path[0].id == 2 + assert mutation2.upsert.key.path[0].id == 2 + assert allocating_ids.result() is None + + @staticmethod + @mock.patch("google.cloud.ndb._datastore_api._datastore_allocate_ids") + def test_idle_callback_failure(datastore_allocate_ids, in_context): + def Mutation(): + path = [entity_pb2.Key.PathElement(kind="SomeKind")] + return datastore_pb2.Mutation( + upsert=entity_pb2.Entity(key=entity_pb2.Key(path=path)) + ) + + mutation1, mutation2 = Mutation(), Mutation() + batch = _api._TransactionalCommitBatch(b"123", _options.Options()) + batch.incomplete_mutations = [mutation1, mutation2] + future1, future2 = tasklets.Future(), tasklets.Future() + batch.incomplete_futures = [future1, future2] + + rpc = tasklets.Future("_datastore_allocate_ids") + datastore_allocate_ids.return_value = rpc + + eventloop = mock.Mock(spec=("queue_rpc", "run")) + with in_context.new(eventloop=eventloop).use(): + batch.idle_callback() + + error = Exception("Spurious error") + rpc.set_exception(error) + + allocating_ids = batch.allocating_ids[0] + assert future1.exception() is error + assert future2.exception() is error + assert allocating_ids.result() is None + + @staticmethod + @mock.patch("google.cloud.ndb._datastore_api._process_commit") + @mock.patch("google.cloud.ndb._datastore_api._datastore_commit") + def test_commit(datastore_commit, process_commit, in_context): + batch = _api._TransactionalCommitBatch(b"123", _options.Options()) + batch.futures = object() + batch.mutations = object() + batch.transaction = b"abc" + + rpc = tasklets.Future("_datastore_commit") + datastore_commit.return_value = rpc + + eventloop = mock.Mock(spec=("queue_rpc", "run", "call_soon")) + eventloop.call_soon = lambda f, *args, **kwargs: f(*args, **kwargs) + with in_context.new(eventloop=eventloop).use(): + future = batch.commit() + + datastore_commit.assert_called_once_with( + batch.mutations, transaction=b"abc", retries=None, timeout=None + ) + rpc.set_result(None) + process_commit.assert_called_once_with(rpc, batch.futures) + + assert future.result() is None + + @staticmethod + @mock.patch("google.cloud.ndb._datastore_api._process_commit") + @mock.patch("google.cloud.ndb._datastore_api._datastore_commit") + def test_commit_error(datastore_commit, process_commit, in_context): + batch = _api._TransactionalCommitBatch(b"123", _options.Options()) + batch.futures = object() + batch.mutations = object() + batch.transaction = b"abc" + + rpc = tasklets.Future("_datastore_commit") + datastore_commit.return_value = rpc + + eventloop = mock.Mock(spec=("queue_rpc", "run", "call_soon")) + eventloop.call_soon = lambda f, *args, **kwargs: f(*args, **kwargs) + with in_context.new(eventloop=eventloop).use(): + future = batch.commit() + + datastore_commit.assert_called_once_with( + batch.mutations, transaction=b"abc", retries=None, timeout=None + ) + + error = Exception("Spurious error") + rpc.set_exception(error) + + process_commit.assert_called_once_with(rpc, batch.futures) + + assert future.exception() is error + + @staticmethod + @mock.patch("google.cloud.ndb._datastore_api._process_commit") + 
@mock.patch("google.cloud.ndb._datastore_api._datastore_commit") + def test_commit_allocating_ids(datastore_commit, process_commit, in_context): + batch = _api._TransactionalCommitBatch(b"123", _options.Options()) + batch.futures = object() + batch.mutations = object() + batch.transaction = b"abc" + + allocated_ids = tasklets.Future("Already allocated ids") + allocated_ids.set_result(None) + batch.allocating_ids.append(allocated_ids) + + allocating_ids = tasklets.Future("AllocateIds") + batch.allocating_ids.append(allocating_ids) + + rpc = tasklets.Future("_datastore_commit") + datastore_commit.return_value = rpc + + eventloop = mock.Mock(spec=("queue_rpc", "run", "call_soon")) + eventloop.call_soon = lambda f, *args, **kwargs: f(*args, **kwargs) + with in_context.new(eventloop=eventloop).use(): + future = batch.commit() + + datastore_commit.assert_not_called() + process_commit.assert_not_called() + + allocating_ids.set_result(None) + datastore_commit.assert_called_once_with( + batch.mutations, transaction=b"abc", retries=None, timeout=None + ) + + rpc.set_result(None) + process_commit.assert_called_once_with(rpc, batch.futures) + + assert future.result() is None + + +class Test_process_commit: + @staticmethod + def test_exception(): + error = Exception("Spurious error.") + rpc = tasklets.Future() + rpc.set_exception(error) + + future1, future2 = tasklets.Future(), tasklets.Future() + _api._process_commit(rpc, [future1, future2]) + assert future1.exception() is error + assert future2.exception() is error + + @staticmethod + def test_exception_some_already_done(): + error = Exception("Spurious error.") + rpc = tasklets.Future() + rpc.set_exception(error) + + future1, future2 = tasklets.Future(), tasklets.Future() + future2.set_result("hi mom") + _api._process_commit(rpc, [future1, future2]) + assert future1.exception() is error + assert future2.result() == "hi mom" + + @staticmethod + def test_success(): + key1 = mock.Mock(path=["one", "two"], spec=("path",)) + mutation1 = mock.Mock(key=key1, spec=("key",)) + key2 = mock.Mock(path=[], spec=("path",)) + mutation2 = mock.Mock(key=key2, spec=("key",)) + response = mock.Mock( + mutation_results=(mutation1, mutation2), spec=("mutation_results",) + ) + + rpc = tasklets.Future() + rpc.set_result(response) + + future1, future2 = tasklets.Future(), tasklets.Future() + _api._process_commit(rpc, [future1, future2]) + assert future1.result() is key1 + assert future2.result() is None + + @staticmethod + def test_success_some_already_done(): + key1 = mock.Mock(path=["one", "two"], spec=("path",)) + mutation1 = mock.Mock(key=key1, spec=("key",)) + key2 = mock.Mock(path=[], spec=("path",)) + mutation2 = mock.Mock(key=key2, spec=("key",)) + response = mock.Mock( + mutation_results=(mutation1, mutation2), spec=("mutation_results",) + ) + + rpc = tasklets.Future() + rpc.set_result(response) + + future1, future2 = tasklets.Future(), tasklets.Future() + future2.set_result(None) + _api._process_commit(rpc, [future1, future2]) + assert future1.result() is key1 + assert future2.result() is None + + +class Test_datastore_commit: + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_api.datastore_pb2") + @mock.patch("google.cloud.ndb._datastore_api.stub") + def test_wo_transaction(stub, datastore_pb2): + mutations = object() + api = stub.return_value + future = tasklets.Future() + future.set_result("response") + api.commit.future.return_value = future + assert _api._datastore_commit(mutations, None).result() == 
"response" + + datastore_pb2.CommitRequest.assert_called_once_with( + project_id="testing", + database_id=None, + mode=datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL, + mutations=mutations, + transaction=None, + ) + + request = datastore_pb2.CommitRequest.return_value + api.commit.future.assert_called_once_with( + request, metadata=mock.ANY, timeout=mock.ANY + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_api.datastore_pb2") + @mock.patch("google.cloud.ndb._datastore_api.stub") + def test_w_transaction(stub, datastore_pb2): + mutations = object() + api = stub.return_value + future = tasklets.Future() + future.set_result("response") + api.commit.future.return_value = future + assert _api._datastore_commit(mutations, b"tx123").result() == "response" + + datastore_pb2.CommitRequest.assert_called_once_with( + project_id="testing", + database_id=None, + mode=datastore_pb2.CommitRequest.Mode.TRANSACTIONAL, + mutations=mutations, + transaction=b"tx123", + ) + + request = datastore_pb2.CommitRequest.return_value + api.commit.future.assert_called_once_with( + request, metadata=mock.ANY, timeout=mock.ANY + ) + + +@pytest.mark.usefixtures("in_context") +def test_allocate(): + options = _options.Options() + future = _api.allocate(["one", "two"], options) + batch = _batch.get_batch(_api._AllocateIdsBatch, options) + assert batch.keys == ["one", "two"] + assert batch.futures == future._dependencies + + +@pytest.mark.usefixtures("in_context") +class Test_AllocateIdsBatch: + @staticmethod + def test_constructor(): + options = _options.Options() + batch = _api._AllocateIdsBatch(options) + assert batch.options is options + assert batch.keys == [] + assert batch.futures == [] + + @staticmethod + def test_add(): + options = _options.Options() + batch = _api._AllocateIdsBatch(options) + futures = batch.add(["key1", "key2"]) + assert batch.keys == ["key1", "key2"] + assert batch.futures == futures + + @staticmethod + @mock.patch("google.cloud.ndb._datastore_api._datastore_allocate_ids") + def test_idle_callback(_datastore_allocate_ids): + options = _options.Options() + batch = _api._AllocateIdsBatch(options) + batch.add( + [ + key_module.Key("SomeKind", None)._key, + key_module.Key("SomeKind", None)._key, + ] + ) + key_pbs = [key.to_protobuf() for key in batch.keys] + batch.idle_callback() + _datastore_allocate_ids.assert_called_once_with( + key_pbs, retries=None, timeout=None + ) + rpc = _datastore_allocate_ids.return_value + rpc.add_done_callback.assert_called_once_with(batch.allocate_ids_callback) + + @staticmethod + @mock.patch("google.cloud.ndb._datastore_api._datastore_allocate_ids") + def test_allocate_ids_callback(_datastore_allocate_ids): + options = _options.Options() + batch = _api._AllocateIdsBatch(options) + batch.futures = futures = [tasklets.Future(), tasklets.Future()] + rpc = utils.future_result(mock.Mock(keys=["key1", "key2"], spec=("key",))) + batch.allocate_ids_callback(rpc) + results = [future.result() for future in futures] + assert results == ["key1", "key2"] + + @staticmethod + @mock.patch("google.cloud.ndb._datastore_api._datastore_allocate_ids") + def test_allocate_ids_callback_w_exception(_datastore_allocate_ids): + options = _options.Options() + batch = _api._AllocateIdsBatch(options) + batch.futures = futures = [tasklets.Future(), tasklets.Future()] + error = Exception("spurious error") + rpc = tasklets.Future() + rpc.set_exception(error) + batch.allocate_ids_callback(rpc) + assert [future.exception() for future in 
futures] == [error, error] + + +@pytest.mark.usefixtures("in_context") +@mock.patch("google.cloud.ndb._datastore_api.datastore_pb2") +@mock.patch("google.cloud.ndb._datastore_api.stub") +def test__datastore_allocate_ids(stub, datastore_pb2): + keys = object() + api = stub.return_value + future = tasklets.Future() + future.set_result("response") + api.allocate_ids.future.return_value = future + assert _api._datastore_allocate_ids(keys).result() == "response" + + datastore_pb2.AllocateIdsRequest.assert_called_once_with( + project_id="testing", database_id=None, keys=keys + ) + + request = datastore_pb2.AllocateIdsRequest.return_value + api.allocate_ids.future.assert_called_once_with( + request, metadata=mock.ANY, timeout=mock.ANY + ) + + +@pytest.mark.usefixtures("in_context") +@mock.patch("google.cloud.ndb._datastore_api._datastore_begin_transaction") +def test_begin_transaction(_datastore_begin_transaction): + rpc = tasklets.Future("BeginTransaction()") + _datastore_begin_transaction.return_value = rpc + + future = _api.begin_transaction("read only") + _datastore_begin_transaction.assert_called_once_with( + "read only", retries=None, timeout=None + ) + rpc.set_result(mock.Mock(transaction=b"tx123", spec=("transaction",))) + + assert future.result() == b"tx123" + + +class Test_datastore_begin_transaction: + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_api.datastore_pb2") + @mock.patch("google.cloud.ndb._datastore_api.stub") + def test_read_only(stub, datastore_pb2): + api = stub.return_value + future = tasklets.Future() + future.set_result("response") + api.begin_transaction.future.return_value = future + assert _api._datastore_begin_transaction(True).result() == "response" + + datastore_pb2.TransactionOptions.assert_called_once_with( + read_only=datastore_pb2.TransactionOptions.ReadOnly() + ) + + transaction_options = datastore_pb2.TransactionOptions.return_value + datastore_pb2.BeginTransactionRequest.assert_called_once_with( + project_id="testing", + database_id=None, + transaction_options=transaction_options, + ) + + request = datastore_pb2.BeginTransactionRequest.return_value + api.begin_transaction.future.assert_called_once_with( + request, metadata=mock.ANY, timeout=mock.ANY + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_api.datastore_pb2") + @mock.patch("google.cloud.ndb._datastore_api.stub") + def test_read_write(stub, datastore_pb2): + api = stub.return_value + future = tasklets.Future() + future.set_result("response") + api.begin_transaction.future.return_value = future + assert _api._datastore_begin_transaction(False).result() == "response" + + datastore_pb2.TransactionOptions.assert_called_once_with( + read_write=datastore_pb2.TransactionOptions.ReadWrite() + ) + + transaction_options = datastore_pb2.TransactionOptions.return_value + datastore_pb2.BeginTransactionRequest.assert_called_once_with( + project_id="testing", + database_id=None, + transaction_options=transaction_options, + ) + + request = datastore_pb2.BeginTransactionRequest.return_value + api.begin_transaction.future.assert_called_once_with( + request, metadata=mock.ANY, timeout=mock.ANY + ) + + +@pytest.mark.usefixtures("in_context") +@mock.patch("google.cloud.ndb._datastore_api._datastore_rollback") +def test_rollback(_datastore_rollback): + rpc = tasklets.Future("Rollback()") + _datastore_rollback.return_value = rpc + future = _api.rollback(b"tx123") + + 
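# rollback() delegates to _datastore_rollback and resolves once that RPC does. +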
_datastore_rollback.assert_called_once_with(b"tx123", retries=None, timeout=None) + rpc.set_result(None) + + assert future.result() is None + + +@pytest.mark.usefixtures("in_context") +@mock.patch("google.cloud.ndb._datastore_api.datastore_pb2") +@mock.patch("google.cloud.ndb._datastore_api.stub") +def test__datastore_rollback(stub, datastore_pb2): + api = stub.return_value + future = tasklets.Future() + future.set_result("response") + api.rollback.future.return_value = future + assert _api._datastore_rollback(b"tx123").result() == "response" + + datastore_pb2.RollbackRequest.assert_called_once_with( + project_id="testing", database_id=None, transaction=b"tx123" + ) + + request = datastore_pb2.RollbackRequest.return_value + api.rollback.future.assert_called_once_with( + request, metadata=mock.ANY, timeout=mock.ANY + ) + + +def test__complete(): + class MockElement: + def __init__(self, id=None, name=None): + self.id = id + self.name = name + + assert not _api._complete(mock.Mock(path=[])) + assert not _api._complete(mock.Mock(path=[MockElement()])) + assert _api._complete(mock.Mock(path=[MockElement(id=1)])) + assert _api._complete(mock.Mock(path=[MockElement(name="himom")])) + + +@pytest.mark.parametrize( + "project_id,database_id,expected", + [ + ("a", "b", "project_id=a&database_id=b"), + ("a", "", "project_id=a"), + ("", "b", "database_id=b"), + ], +) +def test__add_routing_info(project_id, database_id, expected): + expected_new_metadata = ("x-goog-request-params", expected) + request = datastore_pb2.LookupRequest( + project_id=project_id, database_id=database_id + ) + assert _api._add_routing_info((), request) == (expected_new_metadata,) + assert _api._add_routing_info(("already=there",), request) == ( + "already=there", + expected_new_metadata, + ) + + +def test__add_routing_info_no_request_info(): + request = datastore_pb2.LookupRequest() + assert _api._add_routing_info((), request) == () diff --git a/packages/google-cloud-ndb/tests/unit/test__datastore_query.py b/packages/google-cloud-ndb/tests/unit/test__datastore_query.py new file mode 100644 index 000000000000..83d2554633de --- /dev/null +++ b/packages/google-cloud-ndb/tests/unit/test__datastore_query.py @@ -0,0 +1,2090 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import base64 + +from unittest import mock + +import pytest + +from google.cloud.datastore_v1.types import datastore as datastore_pb2 +from google.cloud.datastore_v1.types import entity as entity_pb2 +from google.cloud.datastore_v1.types import query as query_pb2 + +from google.cloud.ndb import _datastore_query +from google.cloud.ndb import context as context_module +from google.cloud.ndb import exceptions +from google.cloud.ndb import key as key_module +from google.cloud.ndb import model +from google.cloud.ndb import query as query_module +from google.cloud.ndb import tasklets + +from . 
import utils + + +def test_make_filter(): + expected = query_pb2.PropertyFilter( + property=query_pb2.PropertyReference(name="harry"), + op=query_pb2.PropertyFilter.Operator.EQUAL, + value=entity_pb2.Value(string_value="Harold"), + ) + assert _datastore_query.make_filter("harry", "=", "Harold") == expected + + +def test_make_composite_and_filter(): + filters = [ + query_pb2.PropertyFilter( + property=query_pb2.PropertyReference(name="harry"), + op=query_pb2.PropertyFilter.Operator.EQUAL, + value=entity_pb2.Value(string_value="Harold"), + ), + query_pb2.PropertyFilter( + property=query_pb2.PropertyReference(name="josie"), + op=query_pb2.PropertyFilter.Operator.EQUAL, + value=entity_pb2.Value(string_value="Josephine"), + ), + ] + expected = query_pb2.CompositeFilter( + op=query_pb2.CompositeFilter.Operator.AND, + filters=[ + query_pb2.Filter(property_filter=sub_filter) for sub_filter in filters + ], + ) + assert _datastore_query.make_composite_and_filter(filters) == expected + + +class Test_fetch: + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query.iterate") + def test_fetch(iterate): + results = iterate.return_value + results.has_next_async.side_effect = utils.future_results( + True, True, True, False + ) + results.next.side_effect = ["a", "b", "c", "d"] + assert _datastore_query.fetch("foo").result() == ["a", "b", "c"] + iterate.assert_called_once_with("foo") + + +class Test_count: + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query.iterate") + def test_count_brute_force(iterate): + class DummyQueryIterator: + def __init__(self, items): + self.items = list(items) + + def has_next_async(self): + return utils.future_result(bool(self.items)) + + def next(self): + return self.items.pop() + + iterate.return_value = DummyQueryIterator(range(5)) + query = query_module.QueryOptions( + filters=mock.Mock(_multiquery=True, spec=("_multiquery",)) + ) + + future = _datastore_query.count(query) + assert future.result() == 5 + iterate.assert_called_once_with( + query_module.QueryOptions(filters=query.filters, projection=["__key__"]), + raw=True, + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query.iterate") + def test_count_brute_force_with_limit(iterate): + class DummyQueryIterator: + def __init__(self, items): + self.items = list(items) + + def has_next_async(self): + return utils.future_result(bool(self.items)) + + def next(self): + return self.items.pop() + + iterate.return_value = DummyQueryIterator(range(5)) + query = query_module.QueryOptions( + filters=mock.Mock( + _multiquery=False, + _post_filters=mock.Mock(return_value=True), + spec=("_multiquery", "_post_filters"), + ), + limit=3, + ) + + future = _datastore_query.count(query) + assert future.result() == 3 + iterate.assert_called_once_with( + query_module.QueryOptions( + filters=query.filters, projection=["__key__"], limit=3 + ), + raw=True, + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query._datastore_run_query") + def test_count_by_skipping_w_a_result(run_query): + # These results should technically be impossible, but better safe than sorry. 
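+ # Each mocked batch reports skipped_results; count() totals the skips plus any returned entities.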
+ run_query.side_effect = utils.future_results( + mock.Mock( + batch=mock.Mock( + more_results=_datastore_query.NOT_FINISHED, + skipped_results=1000, + entity_results=[], + end_cursor=b"himom", + skipped_cursor=b"dontlookatme", + spec=( + "more_results", + "skipped_results", + "entity_results", + "end_cursor", + ), + ), + spec=("batch",), + ), + mock.Mock( + batch=mock.Mock( + more_results=_datastore_query.NOT_FINISHED, + skipped_results=0, + entity_results=[], + end_cursor=b"secondCursor", + spec=( + "more_results", + "skipped_results", + "entity_results", + "end_cursor", + ), + ), + spec=("batch",), + ), + mock.Mock( + batch=mock.Mock( + more_results=_datastore_query.NO_MORE_RESULTS, + skipped_results=99, + entity_results=[object()], + end_cursor=b"ohhaithere", + skipped_cursor=b"hellodad", + spec=( + "more_results", + "skipped_results", + "entity_results", + "end_cursor", + "skipped_cursor", + ), + ), + spec=("batch",), + ), + ) + + query = query_module.QueryOptions() + future = _datastore_query.count(query) + assert future.result() == 1100 + + expected = [ + mock.call( + query_module.QueryOptions( + limit=1, + offset=10000, + projection=["__key__"], + ) + ), + ( + ( + query_module.QueryOptions( + limit=1, + offset=10000, + projection=["__key__"], + start_cursor=_datastore_query.Cursor(b"himom"), + ), + ), + {}, + ), + ( + ( + query_module.QueryOptions( + limit=1, + offset=10000, + projection=["__key__"], + start_cursor=_datastore_query.Cursor(b"secondCursor"), + ), + ), + {}, + ), + ] + assert run_query.call_args_list == expected + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query._datastore_run_query") + def test_count_by_skipping(run_query): + run_query.side_effect = utils.future_results( + mock.Mock( + batch=mock.Mock( + more_results=_datastore_query.NOT_FINISHED, + skipped_results=1000, + entity_results=[], + end_cursor=b"himom", + skipped_cursor=b"dontlookatme", + spec=( + "more_results", + "skipped_results", + "entity_results", + "end_cursor", + ), + ), + spec=("batch",), + ), + mock.Mock( + batch=mock.Mock( + more_results=_datastore_query.NO_MORE_RESULTS, + skipped_results=100, + entity_results=[], + end_cursor=b"nopenuhuh", + skipped_cursor=b"hellodad", + spec=( + "more_results", + "skipped_results", + "entity_results", + "end_cursor", + "skipped_cursor", + ), + ), + spec=("batch",), + ), + ) + + query = query_module.QueryOptions() + future = _datastore_query.count(query) + assert future.result() == 1100 + + expected = [ + mock.call( + query_module.QueryOptions( + limit=1, + offset=10000, + projection=["__key__"], + ) + ), + ( + ( + query_module.QueryOptions( + limit=1, + offset=10000, + projection=["__key__"], + start_cursor=_datastore_query.Cursor(b"himom"), + ), + ), + {}, + ), + ] + assert run_query.call_args_list == expected + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query._count_brute_force") + def test_count_by_skipping_emulator(count_brute_force): + """Regression test for #525 + + Test differences between emulator and the real Datastore. 
+ + https://github.com/googleapis/python-ndb/issues/525 + """ + count_brute_force.return_value = utils.future_result(42) + query = query_module.QueryOptions() + with mock.patch.dict("os.environ", {"DATASTORE_EMULATOR_HOST": "emulator"}): + future = _datastore_query.count(query) + assert future.result() == 42 + assert count_brute_force.call_args_list == [mock.call(query)] + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query._datastore_run_query") + def test_count_by_skipping_with_limit(run_query): + run_query.return_value = utils.future_result( + mock.Mock( + batch=mock.Mock( + more_results=_datastore_query.MORE_RESULTS_AFTER_LIMIT, + skipped_results=99, + entity_results=[object()], + end_cursor=b"himom", + spec=( + "more_results", + "skipped_results", + "entity_results", + "end_cursor", + ), + ), + spec=("batch",), + ) + ) + + query = query_module.QueryOptions( + filters=mock.Mock( + _multiquery=False, + _post_filters=mock.Mock(return_value=None), + spec=("_multiquery", "_post_filters"), + ), + limit=100, + ) + future = _datastore_query.count(query) + assert future.result() == 100 + + run_query.assert_called_once_with( + query_module.QueryOptions( + limit=1, + offset=99, + projection=["__key__"], + filters=query.filters, + ) + ) + + +class Test_iterate: + @staticmethod + @mock.patch("google.cloud.ndb._datastore_query._QueryIteratorImpl") + def test_iterate_single(QueryIterator): + query = mock.Mock(filters=None, spec=("filters",)) + iterator = QueryIterator.return_value + assert _datastore_query.iterate(query) is iterator + QueryIterator.assert_called_once_with(query, raw=False) + + @staticmethod + @mock.patch("google.cloud.ndb._datastore_query._QueryIteratorImpl") + def test_iterate_single_w_filters(QueryIterator): + query = mock.Mock( + filters=mock.Mock( + _multiquery=False, + _post_filters=mock.Mock(return_value=None), + spec=("_multiquery", "_post_filters"), + ), + spec=("filters", "_post_filters"), + ) + iterator = QueryIterator.return_value + assert _datastore_query.iterate(query) is iterator + QueryIterator.assert_called_once_with(query, raw=False) + + @staticmethod + @mock.patch("google.cloud.ndb._datastore_query._PostFilterQueryIteratorImpl") + def test_iterate_single_with_post_filter(QueryIterator): + query = mock.Mock( + filters=mock.Mock(_multiquery=False, spec=("_multiquery", "_post_filters")), + spec=("filters", "_post_filters"), + ) + iterator = QueryIterator.return_value + post_filters = query.filters._post_filters.return_value + predicate = post_filters._to_filter.return_value + assert _datastore_query.iterate(query) is iterator + QueryIterator.assert_called_once_with(query, predicate, raw=False) + post_filters._to_filter.assert_called_once_with(post=True) + + @staticmethod + @mock.patch("google.cloud.ndb._datastore_query._MultiQueryIteratorImpl") + def test_iterate_multi(MultiQueryIterator): + query = mock.Mock( + filters=mock.Mock(_multiquery=True, spec=("_multiquery",)), + spec=("filters",), + ) + iterator = MultiQueryIterator.return_value + assert _datastore_query.iterate(query) is iterator + MultiQueryIterator.assert_called_once_with(query, raw=False) + + +class TestQueryIterator: + @staticmethod + def test_has_next(): + with pytest.raises(NotImplementedError): + _datastore_query.QueryIterator().has_next() + + @staticmethod + def test_has_next_async(): + with pytest.raises(NotImplementedError): + _datastore_query.QueryIterator().has_next_async() + + @staticmethod + def test_probably_has_next(): + with 
pytest.raises(NotImplementedError): + _datastore_query.QueryIterator().probably_has_next() + + @staticmethod + def test_next(): + with pytest.raises(NotImplementedError): + _datastore_query.QueryIterator().next() + + @staticmethod + def test_cursor_before(): + with pytest.raises(NotImplementedError): + _datastore_query.QueryIterator().cursor_before() + + @staticmethod + def test_cursor_after(): + with pytest.raises(NotImplementedError): + _datastore_query.QueryIterator().cursor_after() + + @staticmethod + def test_index_list(): + with pytest.raises(NotImplementedError): + _datastore_query.QueryIterator().index_list() + + +class Test_QueryIteratorImpl: + @staticmethod + def test_constructor(): + iterator = _datastore_query._QueryIteratorImpl("foo") + assert iterator._query == "foo" + assert iterator._batch is None + assert iterator._index is None + assert iterator._has_next_batch is None + assert iterator._cursor_before is None + assert iterator._cursor_after is None + assert not iterator._raw + + @staticmethod + def test_constructor_raw(): + iterator = _datastore_query._QueryIteratorImpl("foo", raw=True) + assert iterator._query == "foo" + assert iterator._batch is None + assert iterator._index is None + assert iterator._has_next_batch is None + assert iterator._cursor_before is None + assert iterator._cursor_after is None + assert iterator._raw + + @staticmethod + def test___iter__(): + iterator = _datastore_query._QueryIteratorImpl("foo") + assert iter(iterator) is iterator + + @staticmethod + def test_has_next(): + iterator = _datastore_query._QueryIteratorImpl("foo") + iterator.has_next_async = mock.Mock(return_value=utils.future_result("bar")) + assert iterator.has_next() == "bar" + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_has_next_async_not_started(): + iterator = _datastore_query._QueryIteratorImpl("foo") + + def dummy_next_batch(): + iterator._index = 0 + iterator._batch = ["a", "b", "c"] + return utils.future_result(None) + + iterator._next_batch = dummy_next_batch + assert iterator.has_next_async().result() + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_has_next_async_started(): + iterator = _datastore_query._QueryIteratorImpl("foo") + iterator._index = 0 + iterator._batch = ["a", "b", "c"] + assert iterator.has_next_async().result() + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_has_next_async_finished(): + iterator = _datastore_query._QueryIteratorImpl("foo") + iterator._index = 3 + iterator._batch = ["a", "b", "c"] + assert not iterator.has_next_async().result() + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_has_next_async_next_batch(): + iterator = _datastore_query._QueryIteratorImpl("foo") + iterator._index = 3 + iterator._batch = ["a", "b", "c"] + iterator._has_next_batch = True + + def dummy_next_batch(): + iterator._index = 0 + iterator._batch = ["d", "e", "f"] + return utils.future_result(None) + + iterator._next_batch = dummy_next_batch + assert iterator.has_next_async().result() + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_has_next_async_next_batch_is_empty(): + iterator = _datastore_query._QueryIteratorImpl("foo") + iterator._index = 3 + iterator._batch = ["a", "b", "c"] + iterator._has_next_batch = True + + batches = [[], ["d", "e", "f"]] + + def dummy_next_batch(): + iterator._index = 0 + iterator._batch = batches.pop(0) + return utils.future_result(None) + + iterator._next_batch = dummy_next_batch + assert iterator.has_next_async().result() + + 
@staticmethod + @pytest.mark.usefixtures("in_context") + def test_has_next_async_next_batch_finished(): + iterator = _datastore_query._QueryIteratorImpl("foo") + iterator._index = 3 + iterator._batch = ["a", "b", "c"] + iterator._has_next_batch = True + + def dummy_next_batch(): + iterator._index = 3 + iterator._batch = ["d", "e", "f"] + return utils.future_result(None) + + iterator._next_batch = dummy_next_batch + assert not iterator.has_next_async().result() + + @staticmethod + def test_probably_has_next_not_started(): + iterator = _datastore_query._QueryIteratorImpl("foo") + assert iterator.probably_has_next() + + @staticmethod + def test_probably_has_next_more_batches(): + iterator = _datastore_query._QueryIteratorImpl("foo") + iterator._batch = "foo" + iterator._has_next_batch = True + assert iterator.probably_has_next() + + @staticmethod + def test_probably_has_next_in_batch(): + iterator = _datastore_query._QueryIteratorImpl("foo") + iterator._batch = ["a", "b", "c"] + iterator._index = 1 + assert iterator.probably_has_next() + + @staticmethod + def test_probably_has_next_finished(): + iterator = _datastore_query._QueryIteratorImpl("foo") + iterator._batch = ["a", "b", "c"] + iterator._index = 3 + assert not iterator.probably_has_next() + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query._datastore_run_query") + def test__next_batch(_datastore_run_query): + entity1 = mock.Mock( + key=entity_pb2.Key( + partition_id=entity_pb2.PartitionId(project_id="testing"), + path=[entity_pb2.Key.PathElement(kind="ThisKind", id=42)], + ) + ) + entity2 = mock.Mock( + key=entity_pb2.Key( + partition_id=entity_pb2.PartitionId(project_id="testing"), + path=[entity_pb2.Key.PathElement(kind="ThisKind", id=43)], + ) + ) + entity3 = mock.Mock( + key=entity_pb2.Key( + partition_id=entity_pb2.PartitionId(project_id="testing"), + path=[entity_pb2.Key.PathElement(kind="ThisKind", id=44)], + ) + ) + entity_results = [ + mock.Mock(entity=entity1, cursor=b"a"), + mock.Mock(entity=entity2, cursor=b"b"), + mock.Mock(entity=entity3, cursor=b"c"), + ] + _datastore_run_query.return_value = utils.future_result( + mock.Mock( + batch=mock.Mock( + entity_result_type=query_pb2.EntityResult.ResultType.FULL, + entity_results=entity_results, + end_cursor=b"abc", + more_results=query_pb2.QueryResultBatch.MoreResultsType.NO_MORE_RESULTS, + ) + ) + ) + + query = query_module.QueryOptions() + iterator = _datastore_query._QueryIteratorImpl(query) + assert iterator._next_batch().result() is None + assert iterator._index == 0 + assert len(iterator._batch) == 3 + assert iterator._batch[0].result_pb.entity == entity1 + assert iterator._batch[0].result_type == query_pb2.EntityResult.ResultType.FULL + assert iterator._batch[0].order_by is None + assert not iterator._has_next_batch + + @staticmethod + @mock.patch("google.cloud.ndb._datastore_query._datastore_run_query") + def test__next_batch_cached_delete(_datastore_run_query, in_context): + entity1 = mock.Mock( + key=entity_pb2.Key( + partition_id=entity_pb2.PartitionId(project_id="testing"), + path=[entity_pb2.Key.PathElement(kind="ThisKind", id=42)], + ) + ) + entity2 = mock.Mock( + key=entity_pb2.Key( + partition_id=entity_pb2.PartitionId(project_id="testing"), + path=[entity_pb2.Key.PathElement(kind="ThisKind", id=43)], + ) + ) + entity3 = mock.Mock( + key=entity_pb2.Key( + partition_id=entity_pb2.PartitionId(project_id="testing"), + path=[entity_pb2.Key.PathElement(kind="ThisKind", id=44)], + ) + ) + entity_results = [ + 
mock.Mock(entity=entity1, cursor=b"a"), + mock.Mock(entity=entity2, cursor=b"b"), + mock.Mock(entity=entity3, cursor=b"c"), + ] + in_context.cache[key_module.Key("ThisKind", 43)] = None + _datastore_run_query.return_value = utils.future_result( + mock.Mock( + batch=mock.Mock( + entity_result_type=query_pb2.EntityResult.ResultType.FULL, + entity_results=entity_results, + end_cursor=b"abc", + more_results=query_pb2.QueryResultBatch.MoreResultsType.NO_MORE_RESULTS, + ) + ) + ) + + query = query_module.QueryOptions() + iterator = _datastore_query._QueryIteratorImpl(query) + assert iterator._next_batch().result() is None + assert iterator._index == 0 + assert len(iterator._batch) == 2 + assert iterator._batch[0].result_pb.entity == entity1 + assert iterator._batch[0].result_type == query_pb2.EntityResult.ResultType.FULL + assert iterator._batch[0].order_by is None + assert iterator._batch[1].result_pb.entity == entity3 + assert not iterator._has_next_batch + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query._datastore_run_query") + def test__next_batch_has_more(_datastore_run_query): + entity1 = mock.Mock( + key=entity_pb2.Key( + partition_id=entity_pb2.PartitionId(project_id="testing"), + path=[entity_pb2.Key.PathElement(kind="ThisKind", id=42)], + ) + ) + entity2 = mock.Mock( + key=entity_pb2.Key( + partition_id=entity_pb2.PartitionId(project_id="testing"), + path=[entity_pb2.Key.PathElement(kind="ThisKind", id=43)], + ) + ) + entity3 = mock.Mock( + key=entity_pb2.Key( + partition_id=entity_pb2.PartitionId(project_id="testing"), + path=[entity_pb2.Key.PathElement(kind="ThisKind", id=44)], + ) + ) + entity_results = [ + mock.Mock(entity=entity1, cursor=b"a"), + mock.Mock(entity=entity2, cursor=b"b"), + mock.Mock(entity=entity3, cursor=b"c"), + ] + _datastore_run_query.return_value = utils.future_result( + mock.Mock( + batch=mock.Mock( + entity_result_type=query_pb2.EntityResult.ResultType.PROJECTION, + entity_results=entity_results, + end_cursor=b"abc", + more_results=query_pb2.QueryResultBatch.MoreResultsType.NOT_FINISHED, + ) + ) + ) + + query = query_module.QueryOptions() + iterator = _datastore_query._QueryIteratorImpl(query) + assert iterator._next_batch().result() is None + assert iterator._index == 0 + assert len(iterator._batch) == 3 + assert iterator._batch[0].result_pb.entity == entity1 + assert ( + iterator._batch[0].result_type + == query_pb2.EntityResult.ResultType.PROJECTION + ) + assert iterator._batch[0].order_by is None + assert iterator._has_next_batch + assert iterator._query.start_cursor.cursor == b"abc" + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query._datastore_run_query") + def test__next_batch_has_more_w_offset_and_limit(_datastore_run_query): + """Regression test for Issue #236 + + https://github.com/googleapis/python-ndb/issues/236 + """ + entity1 = mock.Mock( + key=entity_pb2.Key( + partition_id=entity_pb2.PartitionId(project_id="testing"), + path=[entity_pb2.Key.PathElement(kind="ThisKind", id=42)], + ) + ) + entity2 = mock.Mock( + key=entity_pb2.Key( + partition_id=entity_pb2.PartitionId(project_id="testing"), + path=[entity_pb2.Key.PathElement(kind="ThisKind", id=43)], + ) + ) + entity3 = mock.Mock( + key=entity_pb2.Key( + partition_id=entity_pb2.PartitionId(project_id="testing"), + path=[entity_pb2.Key.PathElement(kind="ThisKind", id=44)], + ) + ) + entity_results = [ + mock.Mock(entity=entity1, cursor=b"a"), + mock.Mock(entity=entity2, cursor=b"b"), 
+ mock.Mock(entity=entity3, cursor=b"c"), + ] + _datastore_run_query.return_value = utils.future_result( + mock.Mock( + batch=mock.Mock( + entity_result_type=query_pb2.EntityResult.ResultType.FULL, + entity_results=entity_results, + end_cursor=b"abc", + skipped_results=5, + more_results=query_pb2.QueryResultBatch.MoreResultsType.NOT_FINISHED, + ) + ) + ) + + query = query_module.QueryOptions(offset=5, limit=5) + iterator = _datastore_query._QueryIteratorImpl(query) + assert iterator._next_batch().result() is None + assert iterator._index == 0 + assert len(iterator._batch) == 3 + assert iterator._batch[0].result_pb.entity == entity1 + assert iterator._batch[0].result_type == query_pb2.EntityResult.ResultType.FULL + assert iterator._batch[0].order_by is None + assert iterator._has_next_batch + assert iterator._query.start_cursor.cursor == b"abc" + assert iterator._query.offset == 0 + assert iterator._query.limit == 2 + + @staticmethod + def test_next_done(): + iterator = _datastore_query._QueryIteratorImpl("foo") + iterator.has_next = mock.Mock(return_value=False) + iterator._cursor_before = b"abc" + iterator._cursor_after = b"bcd" + with pytest.raises(StopIteration): + iterator.next() + + with pytest.raises(exceptions.BadArgumentError): + iterator.cursor_before() + + assert iterator.cursor_after() == b"bcd" + + @staticmethod + def test_next_raw(): + iterator = _datastore_query._QueryIteratorImpl("foo", raw=True) + iterator.has_next = mock.Mock(return_value=True) + iterator._index = 0 + result = mock.Mock(cursor=b"abc") + iterator._batch = [result] + assert iterator.next() is result + assert iterator._index == 1 + assert iterator._cursor_after == b"abc" + + @staticmethod + def test_next_entity(): + iterator = _datastore_query._QueryIteratorImpl("foo") + iterator.has_next = mock.Mock(return_value=True) + iterator._index = 1 + iterator._cursor_before = b"abc" + result = mock.Mock(cursor=b"bcd") + iterator._batch = [None, result] + assert iterator.next() is result.entity.return_value + assert iterator._index == 2 + assert iterator._cursor_after == b"bcd" + + @staticmethod + def test__peek(): + iterator = _datastore_query._QueryIteratorImpl("foo") + iterator._index = 1 + iterator._batch = ["a", "b", "c"] + assert iterator._peek() == "b" + + @staticmethod + def test__peek_key_error(): + iterator = _datastore_query._QueryIteratorImpl("foo") + with pytest.raises(KeyError): + iterator._peek() + + @staticmethod + def test_cursor_before(): + iterator = _datastore_query._QueryIteratorImpl("foo") + iterator._cursor_before = "foo" + assert iterator.cursor_before() == "foo" + + @staticmethod + def test_cursor_before_no_cursor(): + iterator = _datastore_query._QueryIteratorImpl("foo") + with pytest.raises(exceptions.BadArgumentError): + iterator.cursor_before() + + @staticmethod + def test_cursor_after(): + iterator = _datastore_query._QueryIteratorImpl("foo") + iterator._cursor_after = "foo" + assert iterator.cursor_after() == "foo" + + @staticmethod + def test_cursor_after_no_cursor(): + iterator = _datastore_query._QueryIteratorImpl("foo") + with pytest.raises(exceptions.BadArgumentError): + iterator.cursor_after() + + @staticmethod + def test_index_list(): + iterator = _datastore_query._QueryIteratorImpl("foo") + with pytest.raises(NotImplementedError): + iterator.index_list() + + +class Test_PostFilterQueryIteratorImpl: + @staticmethod + def test_constructor(): + foo = model.StringProperty("foo") + query = query_module.QueryOptions(offset=20, limit=10, filters=foo == "this") + predicate = object() + 
iterator = _datastore_query._PostFilterQueryIteratorImpl(query, predicate) + assert iterator._result_set._query == query_module.QueryOptions( + filters=foo == "this" + ) + assert iterator._offset == 20 + assert iterator._limit == 10 + assert iterator._predicate is predicate + + @staticmethod + def test_has_next(): + query = query_module.QueryOptions() + iterator = _datastore_query._PostFilterQueryIteratorImpl(query, "predicate") + iterator.has_next_async = mock.Mock(return_value=utils.future_result("bar")) + assert iterator.has_next() == "bar" + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_has_next_async_next_loaded(): + query = query_module.QueryOptions() + iterator = _datastore_query._PostFilterQueryIteratorImpl(query, "predicate") + iterator._next_result = "foo" + assert iterator.has_next_async().result() + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_iterate_async(): + def predicate(result): + return result.result % 2 == 0 + + query = query_module.QueryOptions() + iterator = _datastore_query._PostFilterQueryIteratorImpl(query, predicate) + iterator._result_set = MockResultSet([1, 2, 3, 4, 5, 6, 7]) + + @tasklets.tasklet + def iterate(): + results = [] + while (yield iterator.has_next_async()): + results.append(iterator.next()) + raise tasklets.Return(results) + + assert iterate().result() == [2, 4, 6] + + with pytest.raises(StopIteration): + iterator.next() + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_iterate_async_raw(): + def predicate(result): + return result.result % 2 == 0 + + query = query_module.QueryOptions() + iterator = _datastore_query._PostFilterQueryIteratorImpl( + query, predicate, raw=True + ) + iterator._result_set = MockResultSet([1, 2, 3, 4, 5, 6, 7]) + + @tasklets.tasklet + def iterate(): + results = [] + while (yield iterator.has_next_async()): + results.append(iterator.next()) + raise tasklets.Return(results) + + assert iterate().result() == [ + MockResult(2), + MockResult(4), + MockResult(6), + ] + + with pytest.raises(StopIteration): + iterator.next() + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_iterate_async_w_limit_and_offset(): + def predicate(result): + return result.result % 2 == 0 + + query = query_module.QueryOptions(offset=1, limit=2) + iterator = _datastore_query._PostFilterQueryIteratorImpl(query, predicate) + iterator._result_set = MockResultSet([1, 2, 3, 4, 5, 6, 7, 8]) + + @tasklets.tasklet + def iterate(): + results = [] + while (yield iterator.has_next_async()): + results.append(iterator.next()) + raise tasklets.Return(results) + + assert iterate().result() == [4, 6] + + with pytest.raises(StopIteration): + iterator.next() + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_probably_has_next_next_loaded(): + query = query_module.QueryOptions() + iterator = _datastore_query._PostFilterQueryIteratorImpl(query, "predicate") + iterator._next_result = "foo" + assert iterator.probably_has_next() is True + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_probably_has_next_delegate(): + query = query_module.QueryOptions() + iterator = _datastore_query._PostFilterQueryIteratorImpl(query, "predicate") + iterator._result_set._next_result = "foo" + assert iterator.probably_has_next() is True + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_probably_has_next_doesnt(): + query = query_module.QueryOptions() + iterator = _datastore_query._PostFilterQueryIteratorImpl(query, "predicate") + 
iterator._result_set._batch = [] + iterator._result_set._index = 0 + assert iterator.probably_has_next() is False + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_cursor_before(): + query = query_module.QueryOptions() + iterator = _datastore_query._PostFilterQueryIteratorImpl(query, "predicate") + iterator._cursor_before = "himom" + assert iterator.cursor_before() == "himom" + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_cursor_before_no_cursor(): + query = query_module.QueryOptions() + iterator = _datastore_query._PostFilterQueryIteratorImpl(query, "predicate") + with pytest.raises(exceptions.BadArgumentError): + iterator.cursor_before() + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_cursor_after(): + query = query_module.QueryOptions() + iterator = _datastore_query._PostFilterQueryIteratorImpl(query, "predicate") + iterator._cursor_after = "himom" + assert iterator.cursor_after() == "himom" + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_cursor_after_no_cursor(): + query = query_module.QueryOptions() + iterator = _datastore_query._PostFilterQueryIteratorImpl(query, "predicate") + with pytest.raises(exceptions.BadArgumentError): + iterator.cursor_after() + + @staticmethod + def test__more_results_after_limit(): + foo = model.StringProperty("foo") + query = query_module.QueryOptions(offset=20, limit=10, filters=foo == "this") + predicate = object() + iterator = _datastore_query._PostFilterQueryIteratorImpl(query, predicate) + assert iterator._result_set._query == query_module.QueryOptions( + filters=foo == "this" + ) + assert iterator._offset == 20 + assert iterator._limit == 10 + assert iterator._predicate is predicate + + iterator._result_set._more_results_after_limit = False + assert iterator._more_results_after_limit is False + + iterator._result_set._more_results_after_limit = True + assert iterator._more_results_after_limit is True + + +class Test_MultiQueryIteratorImpl: + @staticmethod + def test_constructor(): + foo = model.StringProperty("foo") + query = query_module.QueryOptions( + offset=20, + limit=10, + filters=query_module.OR(foo == "this", foo == "that"), + ) + iterator = _datastore_query._MultiQueryIteratorImpl(query) + assert iterator._result_sets[0]._query == query_module.QueryOptions( + filters=foo == "this" + ) + assert iterator._result_sets[1]._query == query_module.QueryOptions( + filters=foo == "that" + ) + assert not iterator._sortable + assert iterator._offset == 20 + assert iterator._limit == 10 + + @staticmethod + def test_constructor_sortable(): + foo = model.StringProperty("foo") + query = query_module.QueryOptions( + filters=query_module.OR(foo == "this", foo == "that"), + order_by=["foo"], + ) + iterator = _datastore_query._MultiQueryIteratorImpl(query) + assert iterator._result_sets[0]._query == query_module.QueryOptions( + filters=foo == "this", order_by=["foo"] + ) + assert iterator._result_sets[1]._query == query_module.QueryOptions( + filters=foo == "that", order_by=["foo"] + ) + assert iterator._sortable + + @staticmethod + def test_constructor_sortable_with_projection(): + foo = model.StringProperty("foo") + order_by = [query_module.PropertyOrder("foo")] + query = query_module.QueryOptions( + filters=query_module.OR(foo == "this", foo == "that"), + order_by=order_by, + projection=["foo"], + ) + iterator = _datastore_query._MultiQueryIteratorImpl(query) + assert iterator._result_sets[0]._query == query_module.QueryOptions( + filters=foo == "this", + 
order_by=order_by, + projection=["foo"], + ) + assert iterator._result_sets[1]._query == query_module.QueryOptions( + filters=foo == "that", + order_by=order_by, + projection=["foo"], + ) + assert iterator._sortable + + @staticmethod + def test_constructor_sortable_with_projection_needs_extra(): + foo = model.StringProperty("foo") + order_by = [query_module.PropertyOrder("foo")] + query = query_module.QueryOptions( + filters=query_module.OR(foo == "this", foo == "that"), + order_by=order_by, + projection=["bar"], + ) + iterator = _datastore_query._MultiQueryIteratorImpl(query) + assert iterator._result_sets[0]._query == query_module.QueryOptions( + filters=foo == "this", + order_by=order_by, + projection=["bar", "foo"], + ) + assert iterator._result_sets[1]._query == query_module.QueryOptions( + filters=foo == "that", + order_by=order_by, + projection=["bar", "foo"], + ) + assert iterator._sortable + assert not iterator._coerce_keys_only + + @staticmethod + def test_constructor_sortable_with_projection_needs_extra_keys_only(): + foo = model.StringProperty("foo") + order_by = [query_module.PropertyOrder("foo")] + query = query_module.QueryOptions( + filters=query_module.OR(foo == "this", foo == "that"), + order_by=order_by, + projection=("__key__",), + ) + iterator = _datastore_query._MultiQueryIteratorImpl(query) + assert iterator._result_sets[0]._query == query_module.QueryOptions( + filters=foo == "this", + order_by=order_by, + projection=["__key__", "foo"], + ) + assert iterator._result_sets[1]._query == query_module.QueryOptions( + filters=foo == "that", + order_by=order_by, + projection=["__key__", "foo"], + ) + assert iterator._sortable + assert iterator._coerce_keys_only + + @staticmethod + def test_iter(): + foo = model.StringProperty("foo") + query = query_module.QueryOptions( + filters=query_module.OR(foo == "this", foo == "that") + ) + iterator = _datastore_query._MultiQueryIteratorImpl(query) + assert iter(iterator) is iterator + + @staticmethod + def test_has_next(): + foo = model.StringProperty("foo") + query = query_module.QueryOptions( + filters=query_module.OR(foo == "this", foo == "that") + ) + iterator = _datastore_query._MultiQueryIteratorImpl(query) + iterator.has_next_async = mock.Mock(return_value=utils.future_result("bar")) + assert iterator.has_next() == "bar" + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_has_next_async_next_loaded(): + foo = model.StringProperty("foo") + query = query_module.QueryOptions( + filters=query_module.OR(foo == "this", foo == "that") + ) + iterator = _datastore_query._MultiQueryIteratorImpl(query) + iterator._next_result = "foo" + assert iterator.has_next_async().result() + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_has_next_async_exhausted(): + foo = model.StringProperty("foo") + query = query_module.QueryOptions( + filters=query_module.OR(foo == "this", foo == "that") + ) + iterator = _datastore_query._MultiQueryIteratorImpl(query) + iterator._result_sets = [] + assert not iterator.has_next_async().result() + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_next_with_extra_projections(): + foo = model.StringProperty("foo") + order_by = [ + query_module.PropertyOrder("foo"), + query_module.PropertyOrder("food"), + ] + query = query_module.QueryOptions( + filters=query_module.OR(foo == "this", foo == "that"), + order_by=order_by, + projection=["bar"], + ) + iterator = _datastore_query._MultiQueryIteratorImpl(query) + iterator._next_result = next_result = mock.Mock( + 
result_pb=mock.Mock( + entity=mock.Mock( + properties={"foo": 1, "bar": "two"}, + spec=("properties",), + ), + spec=("entity",), + ), + spec=("result_pb",), + ) + iterator._raw = True + + assert iterator.next() is next_result + assert "foo" not in next_result.result_pb.entity.properties + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_next_coerce_keys_only(): + foo = model.StringProperty("foo") + order_by = [ + query_module.PropertyOrder("foo"), + query_module.PropertyOrder("food"), + ] + query = query_module.QueryOptions( + filters=query_module.OR(foo == "this", foo == "that"), + order_by=order_by, + projection=["__key__"], + ) + iterator = _datastore_query._MultiQueryIteratorImpl(query) + iterator._next_result = next_result = mock.Mock( + result_pb=mock.Mock( + entity=mock.Mock( + properties={"foo": 1, "bar": "two"}, + spec=("properties",), + ), + spec=("entity",), + ), + entity=mock.Mock( + return_value=mock.Mock( + _key="thekey", + ) + ), + spec=("result_pb", "entity"), + ) + + assert iterator.next() == "thekey" + assert "foo" not in next_result.result_pb.entity.properties + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_iterate_async(): + foo = model.StringProperty("foo") + query = query_module.QueryOptions( + filters=query_module.OR(foo == "this", foo == "that") + ) + iterator = _datastore_query._MultiQueryIteratorImpl(query) + iterator._result_sets = [ + MockResultSet(["a", "c", "e", "g", "i"]), + MockResultSet(["b", "d", "f", "h", "j"]), + ] + + @tasklets.tasklet + def iterate(): + results = [] + while (yield iterator.has_next_async()): + results.append(iterator.next()) + raise tasklets.Return(results) + + assert iterate().result() == [ + "a", + "c", + "e", + "g", + "i", + "b", + "d", + "f", + "h", + "j", + ] + + with pytest.raises(StopIteration): + iterator.next() + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_iterate_async_raw(): + foo = model.StringProperty("foo") + query = query_module.QueryOptions( + filters=query_module.OR(foo == "this", foo == "that") + ) + iterator = _datastore_query._MultiQueryIteratorImpl(query, raw=True) + iterator._result_sets = [ + MockResultSet(["a", "c", "e", "g", "i"]), + MockResultSet(["b", "d", "f", "h", "j"]), + ] + + @tasklets.tasklet + def iterate(): + results = [] + while (yield iterator.has_next_async()): + results.append(iterator.next()) + raise tasklets.Return(results) + + assert iterate().result() == [ + MockResult("a"), + MockResult("c"), + MockResult("e"), + MockResult("g"), + MockResult("i"), + MockResult("b"), + MockResult("d"), + MockResult("f"), + MockResult("h"), + MockResult("j"), + ] + + with pytest.raises(StopIteration): + iterator.next() + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_iterate_async_ordered(): + foo = model.StringProperty("foo") + query = query_module.QueryOptions( + filters=query_module.OR(foo == "this", foo == "that") + ) + iterator = _datastore_query._MultiQueryIteratorImpl(query) + iterator._sortable = True + iterator._result_sets = [ + MockResultSet(["a", "c", "e", "g", "i"]), + MockResultSet(["b", "d", "f", "h", "j"]), + ] + + @tasklets.tasklet + def iterate(): + results = [] + while (yield iterator.has_next_async()): + results.append(iterator.next()) + raise tasklets.Return(results) + + assert iterate().result() == [ + "a", + "b", + "c", + "d", + "e", + "f", + "g", + "h", + "i", + "j", + ] + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_iterate_async_ordered_limit_and_offset(): + foo = 
model.StringProperty("foo") + query = query_module.QueryOptions( + offset=5, + limit=4, + filters=query_module.OR(foo == "this", foo == "that"), + ) + iterator = _datastore_query._MultiQueryIteratorImpl(query) + iterator._sortable = True + iterator._result_sets = [ + MockResultSet(["a", "c", "e", "g", "i"]), + MockResultSet(["a", "b", "d", "f", "h", "j"]), + ] + + @tasklets.tasklet + def iterate(): + results = [] + while (yield iterator.has_next_async()): + results.append(iterator.next()) + raise tasklets.Return(results) + + assert iterate().result() == ["f", "g", "h", "i"] + + @staticmethod + def test_probably_has_next_loaded(): + foo = model.StringProperty("foo") + query = query_module.QueryOptions( + filters=query_module.OR(foo == "this", foo == "that") + ) + iterator = _datastore_query._MultiQueryIteratorImpl(query) + iterator._next = "foo" + assert iterator.probably_has_next() + + @staticmethod + def test_probably_has_next_delegate(): + foo = model.StringProperty("foo") + query = query_module.QueryOptions( + filters=query_module.OR(foo == "this", foo == "that") + ) + iterator = _datastore_query._MultiQueryIteratorImpl(query) + iterator._result_sets = [MockResultSet(["a"]), MockResultSet([])] + assert iterator.probably_has_next() + + @staticmethod + def test_probably_has_next_doesnt(): + foo = model.StringProperty("foo") + query = query_module.QueryOptions( + filters=query_module.OR(foo == "this", foo == "that") + ) + iterator = _datastore_query._MultiQueryIteratorImpl(query) + iterator._result_sets = [MockResultSet([])] + assert not iterator.probably_has_next() + + @staticmethod + def test_cursor_before(): + foo = model.StringProperty("foo") + query = query_module.QueryOptions( + filters=query_module.OR(foo == "this", foo == "that") + ) + iterator = _datastore_query._MultiQueryIteratorImpl(query) + with pytest.raises(exceptions.BadArgumentError): + iterator.cursor_before() + + @staticmethod + def test_cursor_after(): + foo = model.StringProperty("foo") + query = query_module.QueryOptions( + filters=query_module.OR(foo == "this", foo == "that") + ) + iterator = _datastore_query._MultiQueryIteratorImpl(query) + with pytest.raises(exceptions.BadArgumentError): + iterator.cursor_after() + + @staticmethod + def test_index_list(): + foo = model.StringProperty("foo") + query = query_module.QueryOptions( + filters=query_module.OR(foo == "this", foo == "that") + ) + iterator = _datastore_query._MultiQueryIteratorImpl(query) + with pytest.raises(NotImplementedError): + iterator.index_list() + + +class MockResult: + def __init__(self, result): + self.result = result + self.cursor = "cursor-" + str(result) + + def entity(self): + return self.result + + @property + def result_pb(self): + return MockResultPB(self.result) + + def __eq__(self, other): + return self.result == getattr(other, "result", object()) + + +class MockResultPB: + def __init__(self, result): + self.result = result + self.entity = self + self.key = self + self._pb = MockResultPB_pb(result) + + +class MockResultPB_pb: + def __init__(self, result): + self.result = result + + def SerializeToString(self): + return self.result + + +class MockResultSet: + def __init__(self, results): + self.results = results + self.len = len(results) + self.index = 0 + + def has_next_async(self): + return utils.future_result(self.index < self.len) + + def next(self): + result = self._peek() + self.index += 1 + return MockResult(result) + + def _peek(self): + return self.results[self.index] + + def probably_has_next(self): + return self.index < self.len 
+ + +class Test_Result: + @staticmethod + def test_constructor_defaults(): + result = _datastore_query._Result( + result_type=None, + result_pb=query_pb2.EntityResult(), + ) + assert result.order_by is None + assert result._query_options is None + + @staticmethod + def test_constructor_order_by(): + order = query_module.PropertyOrder("foo") + result = _datastore_query._Result( + result_type=None, result_pb=query_pb2.EntityResult(), order_by=[order] + ) + assert result.order_by == [order] + + @staticmethod + def test_constructor_query_options(): + options = query_module.QueryOptions(use_cache=False) + result = _datastore_query._Result( + result_type=None, result_pb=query_pb2.EntityResult(), query_options=options + ) + assert result._query_options == options + + @staticmethod + def test_total_ordering(): + def result(foo, bar=0, baz=""): + return _datastore_query._Result( + result_type=None, + result_pb=query_pb2.EntityResult( + entity=entity_pb2.Entity( + properties={ + "foo": entity_pb2.Value(string_value=foo), + "bar": entity_pb2.Value(integer_value=bar), + "baz": entity_pb2.Value(string_value=baz), + } + ) + ), + order_by=[ + query_module.PropertyOrder("foo"), + query_module.PropertyOrder("bar", reverse=True), + ], + ) + + assert result("a") < result("b") + assert result("b") > result("a") + assert result("a") != result("b") + assert result("a") == result("a") + + assert result("a", 2) < result("a", 1) + assert result("a", 1) > result("a", 2) + assert result("a", 1) != result("a", 2) + assert result("a", 1) == result("a", 1) + + assert result("a", 1, "femur") == result("a", 1, "patella") + assert result("a") != "a" + + @staticmethod + def test__compare_no_order_by(): + result = _datastore_query._Result( + None, mock.Mock(cursor=b"123", spec=("cursor",)) + ) + with pytest.raises(NotImplementedError): + result._compare("other") + + @staticmethod + def test__compare_with_order_by(): + result = _datastore_query._Result( + None, + mock.Mock( + cursor=b"123", + spec=("cursor",), + ), + [ + query_module.PropertyOrder("foo"), + query_module.PropertyOrder("bar", reverse=True), + ], + ) + assert result._compare("other") == NotImplemented + + @staticmethod + def test__compare_with_order_by_entity_key(): + def result(key_path): + key_pb = entity_pb2.Key( + partition_id=entity_pb2.PartitionId(project_id="testing"), + path=[key_path], + ) + return _datastore_query._Result( + result_type=None, + result_pb=query_pb2.EntityResult(entity=entity_pb2.Entity(key=key_pb)), + order_by=[ + query_module.PropertyOrder("__key__"), + ], + ) + + assert result(entity_pb2.Key.PathElement(kind="ThisKind", name="a")) < result( + entity_pb2.Key.PathElement(kind="ThisKind", name="b") + ) + assert result(entity_pb2.Key.PathElement(kind="ThisKind", name="b")) > result( + entity_pb2.Key.PathElement(kind="ThisKind", name="a") + ) + assert result(entity_pb2.Key.PathElement(kind="ThisKind", name="a")) != result( + entity_pb2.Key.PathElement(kind="ThisKind", name="b") + ) + + assert result(entity_pb2.Key.PathElement(kind="ThisKind", id=1)) < result( + entity_pb2.Key.PathElement(kind="ThisKind", id=2) + ) + assert result(entity_pb2.Key.PathElement(kind="ThisKind", id=2)) > result( + entity_pb2.Key.PathElement(kind="ThisKind", id=1) + ) + assert result(entity_pb2.Key.PathElement(kind="ThisKind", id=1)) != result( + entity_pb2.Key.PathElement(kind="ThisKind", id=2) + ) + + @staticmethod + def test__compare_with_order_by_key_property(): + def result(foo_key_path): + foo_key = entity_pb2.Key( + 
partition_id=entity_pb2.PartitionId(project_id="testing"), + path=[foo_key_path], + ) + + return _datastore_query._Result( + result_type=None, + result_pb=query_pb2.EntityResult( + entity=entity_pb2.Entity( + properties={ + "foo": entity_pb2.Value(key_value=foo_key), + } + ) + ), + order_by=[ + query_module.PropertyOrder("foo"), + ], + ) + + assert result(entity_pb2.Key.PathElement(kind="ThisKind", name="a")) < result( + entity_pb2.Key.PathElement(kind="ThisKind", name="b") + ) + assert result(entity_pb2.Key.PathElement(kind="ThisKind", name="b")) > result( + entity_pb2.Key.PathElement(kind="ThisKind", name="a") + ) + assert result(entity_pb2.Key.PathElement(kind="ThisKind", name="a")) != result( + entity_pb2.Key.PathElement(kind="ThisKind", name="b") + ) + + assert result(entity_pb2.Key.PathElement(kind="ThisKind", id=1)) < result( + entity_pb2.Key.PathElement(kind="ThisKind", id=2) + ) + assert result(entity_pb2.Key.PathElement(kind="ThisKind", id=2)) > result( + entity_pb2.Key.PathElement(kind="ThisKind", id=1) + ) + assert result(entity_pb2.Key.PathElement(kind="ThisKind", id=1)) != result( + entity_pb2.Key.PathElement(kind="ThisKind", id=2) + ) + + @staticmethod + @mock.patch("google.cloud.ndb._datastore_query.model") + def test_entity_unsupported_result_type(model): + model._entity_from_protobuf.return_value = "bar" + result = _datastore_query._Result( + "foo", + mock.Mock(entity="foo", cursor=b"123", spec=("entity", "cursor")), + ) + with pytest.raises(NotImplementedError): + result.entity() + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query.model") + def test_entity_full_entity(model): + key_pb = entity_pb2.Key( + partition_id=entity_pb2.PartitionId(project_id="testing"), + path=[entity_pb2.Key.PathElement(kind="ThisKind", id=42)], + ) + entity_pb = mock.Mock(key=key_pb) + entity = mock.Mock(key=key_module.Key("ThisKind", 42)) + model._entity_from_protobuf.return_value = entity + result = _datastore_query._Result( + _datastore_query.RESULT_TYPE_FULL, + mock.Mock(entity=entity_pb, cursor=b"123", spec=("entity", "cursor")), + ) + + context = context_module.get_context() + + assert len(context.cache) == 0 + assert result.entity() is entity + model._entity_from_protobuf.assert_called_once_with(entity_pb) + + # Regression test for #752: ensure cache is updated after querying + assert len(context.cache) == 1 + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query.model") + def test_entity_full_entity_cached(model): + key = key_module.Key("ThisKind", 42) + key_pb = entity_pb2.Key( + partition_id=entity_pb2.PartitionId(project_id="testing"), + path=[entity_pb2.Key.PathElement(kind="ThisKind", id=42)], + ) + entity = mock.Mock(key=key_pb) + cached_entity = mock.Mock(key=key_pb, _key=key) + context = context_module.get_context() + context.cache[key] = cached_entity + model._entity_from_protobuf.return_value = entity + result = _datastore_query._Result( + _datastore_query.RESULT_TYPE_FULL, + mock.Mock(entity=entity, cursor=b"123", spec=("entity", "cursor")), + ) + + assert result.entity() is not entity + assert result.entity() is cached_entity + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query.model") + def test_entity_full_entity_no_cache(model): + context = context_module.get_context() + with context.new(cache_policy=False).use(): + key_pb = entity_pb2.Key( + partition_id=entity_pb2.PartitionId(project_id="testing"), + 
path=[entity_pb2.Key.PathElement(kind="ThisKind", id=42)], + ) + entity = mock.Mock(key=key_pb) + model._entity_from_protobuf.return_value = entity + result = _datastore_query._Result( + _datastore_query.RESULT_TYPE_FULL, + mock.Mock(entity=entity, cursor=b"123", spec=("entity", "cursor")), + ) + assert result.entity() is entity + + # Regression test for #752: ensure cache does not grow (i.e. use up memory) + assert len(context.cache) == 0 + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query.model") + def test_entity_full_entity_no_cache_via_cache_options(model): + context = context_module.get_context() + with context.new().use(): + key_pb = entity_pb2.Key( + partition_id=entity_pb2.PartitionId(project_id="testing"), + path=[entity_pb2.Key.PathElement(kind="ThisKind", id=42)], + ) + entity = mock.Mock(key=key_pb) + model._entity_from_protobuf.return_value = entity + result = _datastore_query._Result( + _datastore_query.RESULT_TYPE_FULL, + mock.Mock(entity=entity, cursor=b"123", spec=("entity", "cursor")), + query_options=query_module.QueryOptions(use_cache=False), + ) + assert result.entity() is entity + + # Regression test for #752: ensure cache does not grow (i.e. use up memory) + assert len(context.cache) == 0 + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query.model") + def test_entity_full_entity_cache_options_true(model): + key_pb = entity_pb2.Key( + partition_id=entity_pb2.PartitionId(project_id="testing"), + path=[entity_pb2.Key.PathElement(kind="ThisKind", id=42)], + ) + entity_pb = mock.Mock(key=key_pb) + entity = mock.Mock(key=key_module.Key("ThisKind", 42)) + model._entity_from_protobuf.return_value = entity + result = _datastore_query._Result( + _datastore_query.RESULT_TYPE_FULL, + mock.Mock(entity=entity_pb, cursor=b"123", spec=("entity", "cursor")), + query_options=query_module.QueryOptions(use_cache=True), + ) + + context = context_module.get_context() + + assert len(context.cache) == 0 + assert result.entity() is entity + model._entity_from_protobuf.assert_called_once_with(entity_pb) + + # Regression test for #752: ensure cache is updated after querying + assert len(context.cache) == 1 + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_entity_key_only(): + key_pb = entity_pb2.Key( + partition_id=entity_pb2.PartitionId(project_id="testing"), + path=[entity_pb2.Key.PathElement(kind="ThisKind", id=42)], + ) + result = _datastore_query._Result( + _datastore_query.RESULT_TYPE_KEY_ONLY, + mock.Mock( + entity=mock.Mock(key=key_pb, spec=("key",)), + cursor=b"123", + spec=("entity", "cursor"), + ), + ) + assert result.entity() == key_module.Key("ThisKind", 42) + + @staticmethod + @mock.patch("google.cloud.ndb._datastore_query.model") + def test_entity_projection(model): + entity = mock.Mock(spec=("_set_projection",)) + entity_pb = mock.Mock(properties={"a": 0, "b": 1}, spec=("properties",)) + model._entity_from_protobuf.return_value = entity + result = _datastore_query._Result( + _datastore_query.RESULT_TYPE_PROJECTION, + mock.Mock(entity=entity_pb, cursor=b"123", spec=("entity", "cursor")), + ) + + assert result.entity() is entity + model._entity_from_protobuf.assert_called_once_with(entity_pb) + projection = entity._set_projection.call_args[0][0] + assert sorted(projection) == ["a", "b"] + entity._set_projection.assert_called_once_with(projection) + + +@pytest.mark.usefixtures("in_context") +class Test__query_to_protobuf: + @staticmethod + def 
test_no_args(): + query = query_module.QueryOptions() + assert _datastore_query._query_to_protobuf(query) == query_pb2.Query() + + @staticmethod + def test_kind(): + query = query_module.QueryOptions(kind="Foo") + assert _datastore_query._query_to_protobuf(query) == query_pb2.Query( + kind=[query_pb2.KindExpression(name="Foo")] + ) + + @staticmethod + def test_ancestor(): + key = key_module.Key("Foo", 123) + query = query_module.QueryOptions(ancestor=key) + expected_pb = query_pb2.Query( + filter=query_pb2.Filter( + property_filter=query_pb2.PropertyFilter( + property=query_pb2.PropertyReference(name="__key__"), + op=query_pb2.PropertyFilter.Operator.HAS_ANCESTOR, + ) + ) + ) + expected_pb.filter.property_filter.value.key_value._pb.CopyFrom( + key._key.to_protobuf()._pb + ) + assert _datastore_query._query_to_protobuf(query) == expected_pb + + @staticmethod + def test_ancestor_with_property_filter(): + key = key_module.Key("Foo", 123) + foo = model.StringProperty("foo") + query = query_module.QueryOptions(ancestor=key, filters=foo == "bar") + query_pb = _datastore_query._query_to_protobuf(query) + + filter_pb = query_pb2.PropertyFilter( + property=query_pb2.PropertyReference(name="foo"), + op=query_pb2.PropertyFilter.Operator.EQUAL, + value=entity_pb2.Value(string_value="bar"), + ) + ancestor_pb = query_pb2.PropertyFilter( + property=query_pb2.PropertyReference(name="__key__"), + op=query_pb2.PropertyFilter.Operator.HAS_ANCESTOR, + ) + ancestor_pb.value.key_value._pb.CopyFrom(key._key.to_protobuf()._pb) + expected_pb = query_pb2.Query( + filter=query_pb2.Filter( + composite_filter=query_pb2.CompositeFilter( + op=query_pb2.CompositeFilter.Operator.AND, + filters=[ + query_pb2.Filter(property_filter=filter_pb), + query_pb2.Filter(property_filter=ancestor_pb), + ], + ) + ) + ) + assert query_pb == expected_pb + + @staticmethod + def test_ancestor_with_composite_filter(): + key = key_module.Key("Foo", 123) + foo = model.StringProperty("foo") + food = model.StringProperty("food") + query = query_module.QueryOptions( + ancestor=key, + filters=query_module.AND(foo == "bar", food == "barn"), + ) + query_pb = _datastore_query._query_to_protobuf(query) + + filter_pb1 = query_pb2.PropertyFilter( + property=query_pb2.PropertyReference(name="foo"), + op=query_pb2.PropertyFilter.Operator.EQUAL, + value=entity_pb2.Value(string_value="bar"), + ) + filter_pb2 = query_pb2.PropertyFilter( + property=query_pb2.PropertyReference(name="food"), + op=query_pb2.PropertyFilter.Operator.EQUAL, + value=entity_pb2.Value(string_value="barn"), + ) + ancestor_pb = query_pb2.PropertyFilter( + property=query_pb2.PropertyReference(name="__key__"), + op=query_pb2.PropertyFilter.Operator.HAS_ANCESTOR, + ) + ancestor_pb.value.key_value._pb.CopyFrom(key._key.to_protobuf()._pb) + expected_pb = query_pb2.Query( + filter=query_pb2.Filter( + composite_filter=query_pb2.CompositeFilter( + op=query_pb2.CompositeFilter.Operator.AND, + filters=[ + query_pb2.Filter(property_filter=filter_pb1), + query_pb2.Filter(property_filter=filter_pb2), + query_pb2.Filter(property_filter=ancestor_pb), + ], + ) + ) + ) + assert query_pb == expected_pb + + @staticmethod + def test_projection(): + query = query_module.QueryOptions(projection=("a", "b")) + expected_pb = query_pb2.Query( + projection=[ + query_pb2.Projection(property=query_pb2.PropertyReference(name="a")), + query_pb2.Projection(property=query_pb2.PropertyReference(name="b")), + ] + ) + assert _datastore_query._query_to_protobuf(query) == expected_pb + + @staticmethod + def 
test_distinct_on(): + query = query_module.QueryOptions(distinct_on=("a", "b")) + expected_pb = query_pb2.Query( + distinct_on=[ + query_pb2.PropertyReference(name="a"), + query_pb2.PropertyReference(name="b"), + ] + ) + assert _datastore_query._query_to_protobuf(query) == expected_pb + + @staticmethod + def test_order_by(): + query = query_module.QueryOptions( + order_by=[ + query_module.PropertyOrder("a"), + query_module.PropertyOrder("b", reverse=True), + ] + ) + expected_pb = query_pb2.Query( + order=[ + query_pb2.PropertyOrder( + property=query_pb2.PropertyReference(name="a"), + direction=query_pb2.PropertyOrder.Direction.ASCENDING, + ), + query_pb2.PropertyOrder( + property=query_pb2.PropertyReference(name="b"), + direction=query_pb2.PropertyOrder.Direction.DESCENDING, + ), + ] + ) + assert _datastore_query._query_to_protobuf(query) == expected_pb + + @staticmethod + def test_filter_pb(): + foo = model.StringProperty("foo") + query = query_module.QueryOptions(kind="Foo", filters=(foo == "bar")) + query_pb = _datastore_query._query_to_protobuf(query) + + filter_pb = query_pb2.PropertyFilter( + property=query_pb2.PropertyReference(name="foo"), + op=query_pb2.PropertyFilter.Operator.EQUAL, + value=entity_pb2.Value(string_value="bar"), + ) + expected_pb = query_pb2.Query( + kind=[query_pb2.KindExpression(name="Foo")], + filter=query_pb2.Filter(property_filter=filter_pb), + ) + assert query_pb == expected_pb + + @staticmethod + def test_offset(): + query = query_module.QueryOptions(offset=20) + assert _datastore_query._query_to_protobuf(query) == query_pb2.Query(offset=20) + + @staticmethod + def test_limit(): + query = query_module.QueryOptions(limit=20) + expected_pb = query_pb2.Query() + expected_pb._pb.limit.value = 20 + assert _datastore_query._query_to_protobuf(query) == expected_pb + + @staticmethod + def test_start_cursor(): + query = query_module.QueryOptions(start_cursor=_datastore_query.Cursor(b"abc")) + assert _datastore_query._query_to_protobuf(query) == query_pb2.Query( + start_cursor=b"abc" + ) + + @staticmethod + def test_end_cursor(): + query = query_module.QueryOptions(end_cursor=_datastore_query.Cursor(b"abc")) + assert _datastore_query._query_to_protobuf(query) == query_pb2.Query( + end_cursor=b"abc" + ) + + +class Test__datastore_run_query: + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query._datastore_api") + def test_it(_datastore_api): + query = query_module.QueryOptions(project="testing", namespace="") + query_pb = _datastore_query._query_to_protobuf(query) + _datastore_api.make_call.return_value = utils.future_result("foo") + read_options = datastore_pb2.ReadOptions() + request = datastore_pb2.RunQueryRequest( + project_id="testing", + database_id=None, + partition_id=entity_pb2.PartitionId(project_id="testing", namespace_id=""), + query=query_pb, + read_options=read_options, + ) + metadata = ("x-goog-request-params", "project_id=testing") + _datastore_api._add_routing_info.return_value = metadata + _datastore_api.get_read_options.return_value = read_options + assert _datastore_query._datastore_run_query(query).result() == "foo" + _datastore_api.make_call.assert_called_once_with( + "run_query", request, timeout=None, metadata=metadata + ) + _datastore_api.get_read_options.assert_called_once_with(query) + + +class TestCursor: + @staticmethod + def test_constructor(): + cursor = _datastore_query.Cursor(b"123") + assert cursor.cursor == b"123" + + @staticmethod + def test_constructor_cursor_and_urlsafe(): + with 
pytest.raises(TypeError): + _datastore_query.Cursor(b"123", urlsafe="what?") + + @staticmethod + def test_constructor_urlsafe(): + urlsafe = base64.urlsafe_b64encode(b"123") + cursor = _datastore_query.Cursor(urlsafe=urlsafe) + assert cursor.cursor == b"123" + + @staticmethod + def test_from_websafe_string(): + urlsafe = base64.urlsafe_b64encode(b"123") + cursor = _datastore_query.Cursor.from_websafe_string(urlsafe) + assert cursor.cursor == b"123" + + @staticmethod + def test_to_websafe_string(): + urlsafe = base64.urlsafe_b64encode(b"123") + cursor = _datastore_query.Cursor(b"123") + assert cursor.to_websafe_string() == urlsafe + + @staticmethod + def test_urlsafe(): + urlsafe = base64.urlsafe_b64encode(b"123") + cursor = _datastore_query.Cursor(b"123") + assert cursor.urlsafe() == urlsafe + + @staticmethod + def test__eq__same(): + assert _datastore_query.Cursor(b"123") == _datastore_query.Cursor(b"123") + assert not _datastore_query.Cursor(b"123") != _datastore_query.Cursor(b"123") + + @staticmethod + def test__eq__different(): + assert _datastore_query.Cursor(b"123") != _datastore_query.Cursor(b"234") + assert not _datastore_query.Cursor(b"123") == _datastore_query.Cursor(b"234") + + @staticmethod + def test__eq__different_type(): + assert _datastore_query.Cursor(b"123") != b"234" + assert not _datastore_query.Cursor(b"123") == b"234" + + @staticmethod + def test__hash__(): + assert hash(_datastore_query.Cursor(b"123")) == hash(b"123") diff --git a/packages/google-cloud-ndb/tests/unit/test__datastore_types.py b/packages/google-cloud-ndb/tests/unit/test__datastore_types.py new file mode 100644 index 000000000000..f24b677a5d7f --- /dev/null +++ b/packages/google-cloud-ndb/tests/unit/test__datastore_types.py @@ -0,0 +1,79 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from unittest import mock + +import pytest + +from google.cloud.ndb import _datastore_types +from google.cloud.ndb import exceptions + + +class TestBlobKey: + @staticmethod + def test_constructor_bytes(): + value = b"abc" + blob_key = _datastore_types.BlobKey(value) + assert blob_key._blob_key is value + + @staticmethod + def test_constructor_none(): + blob_key = _datastore_types.BlobKey(None) + assert blob_key._blob_key is None + + @staticmethod + def test_constructor_too_long(): + value = b"a" * 2000 + with pytest.raises(exceptions.BadValueError): + _datastore_types.BlobKey(value) + + @staticmethod + def test_constructor_bad_type(): + value = {"a": "b"} + with pytest.raises(exceptions.BadValueError): + _datastore_types.BlobKey(value) + + @staticmethod + def test___eq__(): + blob_key1 = _datastore_types.BlobKey(b"abc") + blob_key2 = _datastore_types.BlobKey(b"def") + blob_key3 = _datastore_types.BlobKey(None) + blob_key4 = b"ghi" + blob_key5 = mock.sentinel.blob_key + assert blob_key1 == blob_key1 + assert not blob_key1 == blob_key2 + assert not blob_key1 == blob_key3 + assert not blob_key1 == blob_key4 + assert not blob_key1 == blob_key5 + + @staticmethod + def test___lt__(): + blob_key1 = _datastore_types.BlobKey(b"abc") + blob_key2 = _datastore_types.BlobKey(b"def") + blob_key3 = _datastore_types.BlobKey(None) + blob_key4 = b"ghi" + blob_key5 = mock.sentinel.blob_key + assert not blob_key1 < blob_key1 + assert blob_key1 < blob_key2 + with pytest.raises(TypeError): + blob_key1 < blob_key3 + assert blob_key1 < blob_key4 + with pytest.raises(TypeError): + blob_key1 < blob_key5 + + @staticmethod + def test___hash__(): + value = b"289399038904ndkjndjnd02mx" + blob_key = _datastore_types.BlobKey(value) + assert hash(blob_key) == hash(value) diff --git a/packages/google-cloud-ndb/tests/unit/test__eventloop.py b/packages/google-cloud-ndb/tests/unit/test__eventloop.py new file mode 100644 index 000000000000..2662008817c5 --- /dev/null +++ b/packages/google-cloud-ndb/tests/unit/test__eventloop.py @@ -0,0 +1,359 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+import collections
+
+from unittest import mock
+
+import grpc
+import pytest
+
+from google.cloud.ndb import exceptions
+from google.cloud.ndb import _eventloop
+
+
+def _Event(when=0, what="foo", args=(), kw={}):
+ return _eventloop._Event(when, what, args, kw)
+
+
+class TestEventLoop:
+ @staticmethod
+ def _make_one(**attrs):
+ loop = _eventloop.EventLoop()
+ for name, value in attrs.items():
+ setattr(loop, name, value)
+ return loop
+
+ def test_constructor(self):
+ loop = self._make_one()
+ assert loop.current == collections.deque()
+ assert loop.idlers == collections.deque()
+ assert loop.inactive == 0
+ assert loop.queue == []
+ assert loop.rpcs == {}
+
+ def test_clear_all(self):
+ loop = self._make_one()
+ loop.current.append("foo")
+ loop.idlers.append("bar")
+ loop.queue.append("baz")
+ loop.rpcs["qux"] = "quux"
+ loop.clear()
+ assert not loop.current
+ assert not loop.idlers
+ assert not loop.queue
+ assert not loop.rpcs
+
+ # idempotence (branch coverage)
+ loop.clear()
+ assert not loop.current
+ assert not loop.idlers
+ assert not loop.queue
+ assert not loop.rpcs
+
+ def test_clear_current(self):
+ loop = self._make_one()
+ loop.current.append("foo")
+ loop.clear()
+ assert not loop.current
+ assert not loop.idlers
+ assert not loop.queue
+ assert not loop.rpcs
+
+ def test_clear_idlers(self):
+ loop = self._make_one()
+ loop.idlers.append("foo")
+ loop.clear()
+ assert not loop.current
+ assert not loop.idlers
+ assert not loop.queue
+ assert not loop.rpcs
+
+ def test_insert_event_right_empty_queue(self):
+ loop = self._make_one()
+ event = _Event()
+ loop.insort_event_right(event)
+ assert loop.queue == [event]
+
+ def test_insert_event_right_head(self):
+ loop = self._make_one(queue=[_Event(1, "bar")])
+ loop.insort_event_right(_Event(0, "foo"))
+ assert loop.queue == [_Event(0, "foo"), _Event(1, "bar")]
+
+ def test_insert_event_right_tail(self):
+ loop = self._make_one(queue=[_Event(0, "foo")])
+ loop.insort_event_right(_Event(1, "bar"))
+ assert loop.queue == [_Event(0, "foo"), _Event(1, "bar")]
+
+ def test_insert_event_right_middle(self):
+ loop = self._make_one(queue=[_Event(0, "foo"), _Event(2, "baz")])
+ loop.insort_event_right(_Event(1, "bar"))
+ assert loop.queue == [
+ _Event(0, "foo"),
+ _Event(1, "bar"),
+ _Event(2, "baz"),
+ ]
+
+ def test_insert_event_right_collision(self):
+ loop = self._make_one(
+ queue=[_Event(0, "foo"), _Event(1, "bar"), _Event(2, "baz")]
+ )
+ loop.insort_event_right(_Event(1, "barbar"))
+ assert loop.queue == [
+ _Event(0, "foo"),
+ _Event(1, "bar"),
+ _Event(1, "barbar"),
+ _Event(2, "baz"),
+ ]
+
+ def test_call_soon(self):
+ loop = self._make_one()
+ loop.call_soon("foo", "bar", baz="qux")
+ assert list(loop.current) == [("foo", ("bar",), {"baz": "qux"})]
+ assert not loop.queue
+
+ @mock.patch("google.cloud.ndb._eventloop.time")
+ def test_queue_call_delay(self, time):
+ loop = self._make_one()
+ time.time.return_value = 5
+ loop.queue_call(5, "foo", "bar", baz="qux")
+ assert not loop.current
+ assert loop.queue == [_Event(10, "foo", ("bar",), {"baz": "qux"})]
+
+ @mock.patch("google.cloud.ndb._eventloop.time")
+ def test_queue_call_absolute(self, time):
+ loop = self._make_one()
+ time.time.return_value = 5
+ loop.queue_call(10e10, "foo", "bar", baz="qux")
+ assert not loop.current
+ assert loop.queue == [_Event(10e10, "foo", ("bar",), {"baz": "qux"})]
+
+ def test_queue_rpc(self):
+ loop = self._make_one()
+ callback = mock.Mock(spec=())
+ rpc = mock.Mock(spec=grpc.Future)
+ loop.queue_rpc(rpc, callback)
+ assert
list(loop.rpcs.values()) == [callback] + + rpc_callback = rpc.add_done_callback.call_args[0][0] + rpc_callback(rpc) + rpc_id, rpc_result = loop.rpc_results.get() + assert rpc_result is rpc + assert loop.rpcs[rpc_id] is callback + + def test_add_idle(self): + loop = self._make_one() + loop.add_idle("foo", "bar", baz="qux") + assert list(loop.idlers) == [("foo", ("bar",), {"baz": "qux"})] + + def test_run_idle_no_idlers(self): + loop = self._make_one() + assert loop.run_idle() is False + + def test_run_idle_all_inactive(self): + loop = self._make_one() + loop.add_idle("foo") + loop.inactive = 1 + assert loop.run_idle() is False + + def test_run_idle_remove_callback(self): + callback = mock.Mock(__name__="callback") + callback.return_value = None + loop = self._make_one() + loop.add_idle(callback, "foo", bar="baz") + loop.add_idle("foo") + assert loop.run_idle() is True + callback.assert_called_once_with("foo", bar="baz") + assert len(loop.idlers) == 1 + assert loop.inactive == 0 + + def test_run_idle_did_work(self): + callback = mock.Mock(__name__="callback") + callback.return_value = True + loop = self._make_one() + loop.add_idle(callback, "foo", bar="baz") + loop.add_idle("foo") + loop.inactive = 1 + assert loop.run_idle() is True + callback.assert_called_once_with("foo", bar="baz") + assert len(loop.idlers) == 2 + assert loop.inactive == 0 + + def test_run_idle_did_no_work(self): + callback = mock.Mock(__name__="callback") + callback.return_value = False + loop = self._make_one() + loop.add_idle(callback, "foo", bar="baz") + loop.add_idle("foo") + loop.inactive = 1 + assert loop.run_idle() is True + callback.assert_called_once_with("foo", bar="baz") + assert len(loop.idlers) == 2 + assert loop.inactive == 2 + + def test_run0_nothing_to_do(self): + loop = self._make_one() + assert loop.run0() is None + + def test_run0_current(self): + callback = mock.Mock(__name__="callback") + loop = self._make_one() + loop.call_soon(callback, "foo", bar="baz") + loop.inactive = 88 + assert loop.run0() == 0 + callback.assert_called_once_with("foo", bar="baz") + assert len(loop.current) == 0 + assert loop.inactive == 0 + + def test_run0_idler(self): + callback = mock.Mock(__name__="callback") + loop = self._make_one() + loop.add_idle(callback, "foo", bar="baz") + assert loop.run0() == 0 + callback.assert_called_once_with("foo", bar="baz") + + @mock.patch("google.cloud.ndb._eventloop.time") + def test_run0_next_later(self, time): + time.time.return_value = 0 + callback = mock.Mock(__name__="callback") + loop = self._make_one() + loop.queue_call(5, callback, "foo", bar="baz") + loop.inactive = 88 + assert loop.run0() == 5 + callback.assert_not_called() + assert len(loop.queue) == 1 + assert loop.inactive == 88 + + @mock.patch("google.cloud.ndb._eventloop.time") + def test_run0_next_now(self, time): + time.time.return_value = 0 + callback = mock.Mock(__name__="callback") + loop = self._make_one() + loop.queue_call(6, "foo") + loop.queue_call(5, callback, "foo", bar="baz") + loop.inactive = 88 + time.time.return_value = 10 + assert loop.run0() == 0 + callback.assert_called_once_with("foo", bar="baz") + assert len(loop.queue) == 1 + assert loop.inactive == 0 + + @pytest.mark.usefixtures("in_context") + def test_run0_rpc(self): + rpc = mock.Mock(spec=grpc.Future) + callback = mock.Mock(spec=()) + + loop = self._make_one() + loop.rpcs["foo"] = callback + loop.rpc_results.put(("foo", rpc)) + + loop.run0() + assert len(loop.rpcs) == 0 + assert loop.rpc_results.empty() + callback.assert_called_once_with(rpc) + + def 
test_run1_nothing_to_do(self): + loop = self._make_one() + assert loop.run1() is False + + @mock.patch("google.cloud.ndb._eventloop.time") + def test_run1_has_work_now(self, time): + callback = mock.Mock(__name__="callback") + loop = self._make_one() + loop.call_soon(callback) + assert loop.run1() is True + time.sleep.assert_not_called() + callback.assert_called_once_with() + + @mock.patch("google.cloud.ndb._eventloop.time") + def test_run1_has_work_later(self, time): + time.time.return_value = 0 + callback = mock.Mock(__name__="callback") + loop = self._make_one() + loop.queue_call(5, callback) + assert loop.run1() is True + time.sleep.assert_called_once_with(5) + callback.assert_not_called() + + @mock.patch("google.cloud.ndb._eventloop.time") + def test_run(self, time): + time.time.return_value = 0 + + def mock_sleep(seconds): + time.time.return_value += seconds + + time.sleep = mock_sleep + idler = mock.Mock(__name__="idler") + idler.return_value = None + runnow = mock.Mock(__name__="runnow") + runlater = mock.Mock(__name__="runlater") + loop = self._make_one() + loop.add_idle(idler) + loop.call_soon(runnow) + loop.queue_call(5, runlater) + loop.run() + idler.assert_called_once_with() + runnow.assert_called_once_with() + runlater.assert_called_once_with() + + +def test_get_event_loop(context): + with pytest.raises(exceptions.ContextError): + _eventloop.get_event_loop() + with context.use(): + loop = _eventloop.get_event_loop() + assert isinstance(loop, _eventloop.EventLoop) + assert _eventloop.get_event_loop() is loop + + +def test_add_idle(context): + loop = mock.Mock(spec=("run", "add_idle")) + with context.new(eventloop=loop).use(): + _eventloop.add_idle("foo", "bar", baz="qux") + loop.add_idle.assert_called_once_with("foo", "bar", baz="qux") + + +def test_call_soon(context): + loop = mock.Mock(spec=("run", "call_soon")) + with context.new(eventloop=loop).use(): + _eventloop.call_soon("foo", "bar", baz="qux") + loop.call_soon.assert_called_once_with("foo", "bar", baz="qux") + + +def test_queue_call(context): + loop = mock.Mock(spec=("run", "queue_call")) + with context.new(eventloop=loop).use(): + _eventloop.queue_call(42, "foo", "bar", baz="qux") + loop.queue_call.assert_called_once_with(42, "foo", "bar", baz="qux") + + +def test_queue_rpc(context): + loop = mock.Mock(spec=("run", "queue_rpc")) + with context.new(eventloop=loop).use(): + _eventloop.queue_rpc("foo", "bar") + loop.queue_rpc.assert_called_once_with("foo", "bar") + + +def test_run(context): + loop = mock.Mock(spec=("run",)) + with context.new(eventloop=loop).use(): + _eventloop.run() + loop.run.assert_called_once_with() + + +def test_run1(context): + loop = mock.Mock(spec=("run", "run1")) + with context.new(eventloop=loop).use(): + _eventloop.run1() + loop.run1.assert_called_once_with() diff --git a/packages/google-cloud-ndb/tests/unit/test__gql.py b/packages/google-cloud-ndb/tests/unit/test__gql.py new file mode 100644 index 000000000000..3c96d4fe6d0a --- /dev/null +++ b/packages/google-cloud-ndb/tests/unit/test__gql.py @@ -0,0 +1,722 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import datetime +import pytest + +from google.cloud.ndb import exceptions +from google.cloud.ndb import key +from google.cloud.ndb import model +from google.cloud.ndb import _gql as gql_module +from google.cloud.ndb import query as query_module + + +GQL_QUERY = """ + SELECT prop1, prop2 FROM SomeKind WHERE prop3>5 and prop2='xxx' + ORDER BY prop4, prop1 DESC LIMIT 10 OFFSET 5 HINT ORDER_FIRST +""" + + +class TestLiteral: + @staticmethod + def test_constructor(): + literal = gql_module.Literal("abc") + assert literal.__dict__ == {"_value": "abc"} + + @staticmethod + def test_Get(): + literal = gql_module.Literal("abc") + assert literal.Get() == "abc" + + @staticmethod + def test___repr__(): + literal = gql_module.Literal("abc") + assert literal.__repr__() == "Literal('abc')" + + @staticmethod + def test___eq__(): + literal = gql_module.Literal("abc") + literal2 = gql_module.Literal("abc") + literal3 = gql_module.Literal("xyz") + assert literal.__eq__(literal2) is True + assert literal.__eq__(literal3) is False + assert literal.__eq__(42) is NotImplemented + + +class TestGQL: + @staticmethod + def test_constructor(): + gql = gql_module.GQL(GQL_QUERY) + assert gql.kind() == "SomeKind" + + @staticmethod + def test_constructor_with_namespace(): + gql = gql_module.GQL(GQL_QUERY, namespace="test-namespace") + assert gql._namespace == "test-namespace" + + @staticmethod + def test_constructor_bad_query(): + with pytest.raises(exceptions.BadQueryError): + gql_module.GQL("BAD, BAD QUERY") + + @staticmethod + def test_constructor_incomplete_query(): + with pytest.raises(exceptions.BadQueryError): + gql_module.GQL("SELECT") + + @staticmethod + def test_constructor_extra_query(): + with pytest.raises(exceptions.BadQueryError): + gql_module.GQL("SELECT * FROM SomeKind; END") + + @staticmethod + def test_constructor_empty_where(): + with pytest.raises(exceptions.BadQueryError): + gql_module.GQL("SELECT * FROM SomeKind WHERE") + + @staticmethod + def test_constructor_empty_where_condition(): + with pytest.raises(exceptions.BadQueryError): + gql_module.GQL("SELECT * FROM SomeKind WHERE") + + @staticmethod + def test_constructor_bad_where_condition(): + with pytest.raises(exceptions.BadQueryError): + gql_module.GQL("SELECT * FROM SomeKind WHERE WE_ARE") + + @staticmethod + def test_constructor_reserved_where_identifier(): + with pytest.raises(exceptions.BadQueryError): + gql_module.GQL("SELECT * FROM SomeKind WHERE WHERE") + + @staticmethod + def test_constructor_empty_where_condition_value(): + with pytest.raises(exceptions.BadQueryError): + gql_module.GQL("SELECT * FROM SomeKind WHERE prop1=") + + @staticmethod + def test_filters(): + Literal = gql_module.Literal + gql = gql_module.GQL(GQL_QUERY) + assert gql.filters() == { + ("prop2", "="): [("nop", [Literal("xxx")])], + ("prop3", ">"): [("nop", [Literal(5)])], + } + + @staticmethod + def test_hint(): + gql = gql_module.GQL("SELECT * FROM SomeKind HINT ORDER_FIRST") + assert gql.hint() == "ORDER_FIRST" + gql = gql_module.GQL("SELECT * FROM SomeKind HINT FILTER_FIRST") + assert gql.hint() == "FILTER_FIRST" + gql = gql_module.GQL("SELECT * FROM SomeKind HINT ANCESTOR_FIRST") + assert gql.hint() == "ANCESTOR_FIRST" + with pytest.raises(exceptions.BadQueryError): + gql_module.GQL("SELECT * FROM SomeKind HINT TAKE_THE_HINT") + + @staticmethod + def test_limit(): + gql = gql_module.GQL("SELECT * FROM SomeKind LIMIT 10") + assert gql.limit() == 10 + gql = 
gql_module.GQL("SELECT * FROM SomeKind LIMIT 10, 5") + assert gql.limit() == 5 + with pytest.raises(exceptions.BadQueryError): + gql_module.GQL("SELECT * FROM SomeKind LIMIT 0") + with pytest.raises(exceptions.BadQueryError): + gql_module.GQL("SELECT * FROM SomeKind LIMIT -1") + with pytest.raises(exceptions.BadQueryError): + gql_module.GQL("SELECT * FROM SomeKind LIMIT -1, 10") + with pytest.raises(exceptions.BadQueryError): + gql_module.GQL("SELECT * FROM SomeKind LIMIT THE_SKY") + + @staticmethod + def test_offset(): + gql = gql_module.GQL("SELECT * FROM SomeKind") + assert gql.offset() == 0 + gql = gql_module.GQL("SELECT * FROM SomeKind OFFSET 10") + assert gql.offset() == 10 + gql = gql_module.GQL("SELECT * FROM SomeKind LIMIT 10, 5") + assert gql.offset() == 10 + with pytest.raises(exceptions.BadQueryError): + gql_module.GQL("SELECT * FROM SomeKind OFFSET -1") + with pytest.raises(exceptions.BadQueryError): + gql_module.GQL("SELECT * FROM SomeKind LIMIT 5, 10 OFFSET 8") + with pytest.raises(exceptions.BadQueryError): + gql_module.GQL("SELECT * FROM SomeKind OFFSET ZERO") + + @staticmethod + def test_orderings(): + gql = gql_module.GQL(GQL_QUERY) + assert gql.orderings() == [("prop4", 1), ("prop1", 2)] + + @staticmethod + def test_is_keys_only(): + gql = gql_module.GQL(GQL_QUERY) + assert gql.is_keys_only() is False + gql = gql_module.GQL("SELECT __key__ from SomeKind") + assert gql.is_keys_only() is True + + @staticmethod + def test_projection(): + gql = gql_module.GQL(GQL_QUERY) + assert gql.projection() == ("prop1", "prop2") + + @staticmethod + def test_is_distinct(): + gql = gql_module.GQL(GQL_QUERY) + assert gql.is_distinct() is False + gql = gql_module.GQL("SELECT DISTINCT prop1 from SomeKind") + assert gql.is_distinct() is True + + @staticmethod + def test_kind(): + gql = gql_module.GQL(GQL_QUERY) + assert gql.kind() == "SomeKind" + assert gql._entity == "SomeKind" + + @staticmethod + def test_cast(): + gql = gql_module.GQL("SELECT * FROM SomeKind WHERE prop1=user('js')") + assert gql.filters() == {("prop1", "="): [("user", [gql_module.Literal("js")])]} + + @staticmethod + def test_in_list(): + Literal = gql_module.Literal + gql = gql_module.GQL("SELECT * FROM SomeKind WHERE prop1 IN (1, 2, 3)") + assert gql.filters() == { + ("prop1", "IN"): [("list", [Literal(1), Literal(2), Literal(3)])] + } + + @staticmethod + def test_not_in_list(): + Literal = gql_module.Literal + gql = gql_module.GQL("SELECT * FROM SomeKind WHERE prop1 NOT IN (1, 2, 3)") + assert gql.filters() == { + ("prop1", "NOT_IN"): [("list", [Literal(1), Literal(2), Literal(3)])] + } + + @staticmethod + def test_cast_list_no_in(): + with pytest.raises(exceptions.BadQueryError): + gql_module.GQL("SELECT * FROM SomeKind WHERE prop1=(1, 2, 3)") + + @staticmethod + def test_not_without_in(): + with pytest.raises(exceptions.BadQueryError): + gql_module.GQL("SELECT * FROM SomeKind WHERE prop1 NOT=1") + + @staticmethod + def test_reference(): + gql = gql_module.GQL("SELECT * FROM SomeKind WHERE prop1=:ref") + assert gql.filters() == {("prop1", "="): [("nop", ["ref"])]} + + @staticmethod + def test_ancestor_is(): + gql = gql_module.GQL("SELECT * FROM SomeKind WHERE ANCESTOR IS 'AnyKind'") + assert gql.filters() == {(-1, "is"): [("nop", [gql_module.Literal("AnyKind")])]} + + @staticmethod + def test_ancestor_multiple_ancestors(): + with pytest.raises(exceptions.BadQueryError): + gql_module.GQL( + ( + "SELECT * FROM SomeKind WHERE ANCESTOR IS 'AnyKind' AND " + "ANCESTOR IS 'OtherKind'" + ) + ) + + @staticmethod + def 
test_ancestor_no_is(): + with pytest.raises(exceptions.BadQueryError): + gql_module.GQL("SELECT * FROM SomeKind WHERE ANCESTOR='OtherKind'") + + @staticmethod + def test_is_no_ancestor(): + with pytest.raises(exceptions.BadQueryError): + gql_module.GQL("SELECT * FROM SomeKind WHERE prop1 IS 'OtherKind'") + + @staticmethod + def test_func(): + gql = gql_module.GQL("SELECT * FROM SomeKind WHERE prop1=key(:1)") + assert gql.filters() == {("prop1", "="): [("key", [1])]} + + @staticmethod + def test_null(): + gql = gql_module.GQL("SELECT * FROM SomeKind WHERE prop1=NULL") + assert gql.filters() == {("prop1", "="): [("nop", [gql_module.Literal(None)])]} + + @staticmethod + def test_true(): + gql = gql_module.GQL("SELECT * FROM SomeKind WHERE prop1=TRUE") + assert gql.filters() == {("prop1", "="): [("nop", [gql_module.Literal(True)])]} + + @staticmethod + def test_false(): + gql = gql_module.GQL("SELECT * FROM SomeKind WHERE prop1=FALSE") + assert gql.filters() == {("prop1", "="): [("nop", [gql_module.Literal(False)])]} + + @staticmethod + def test_float(): + gql = gql_module.GQL("SELECT * FROM SomeKind WHERE prop1=3.14") + assert gql.filters() == {("prop1", "="): [("nop", [gql_module.Literal(3.14)])]} + + @staticmethod + def test_quoted_identifier(): + gql = gql_module.GQL('SELECT * FROM SomeKind WHERE "prop1"=3.14') + assert gql.filters() == {("prop1", "="): [("nop", [gql_module.Literal(3.14)])]} + + @staticmethod + def test_order_by_ascending(): + gql = gql_module.GQL("SELECT * FROM SomeKind ORDER BY prop1 ASC") + assert gql.orderings() == [("prop1", 1)] + + @staticmethod + def test_order_by_no_arg(): + with pytest.raises(exceptions.BadQueryError): + gql_module.GQL("SELECT * FROM SomeKind ORDER BY") + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_get_query(): + class SomeKind(model.Model): + prop1 = model.StringProperty() + prop2 = model.StringProperty() + prop3 = model.IntegerProperty() + prop4 = model.IntegerProperty() + + rep = ( + "Query(namespace='test-namespace', kind='SomeKind', filters=AND(FilterNode('prop2', '=', {}" + "), FilterNode('prop3', '>', 5)), order_by=[PropertyOrder(name=" + "'prop4', reverse=False), PropertyOrder(name='prop1', " + "reverse=True)], limit=10, offset=5, " + "projection=['prop1', 'prop2'])" + ) + gql = gql_module.GQL(GQL_QUERY, namespace="test-namespace") + query = gql.get_query() + compat_rep = "'xxx'" + assert repr(query) == rep.format(compat_rep) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_get_query_distinct(): + class SomeKind(model.Model): + prop1 = model.StringProperty() + + gql = gql_module.GQL("SELECT DISTINCT prop1 FROM SomeKind") + query = gql.get_query() + assert query.distinct_on == ("prop1",) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_get_query_no_kind(): + class SomeKind(model.Model): + prop1 = model.StringProperty() + + gql = gql_module.GQL("SELECT *") + query = gql.get_query() + assert query.kind is None + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_get_query_in(): + class SomeKind(model.Model): + prop1 = model.IntegerProperty() + + gql = gql_module.GQL("SELECT prop1 FROM SomeKind WHERE prop1 IN (1, 2, 3)") + query = gql.get_query() + assert query.filters == query_module.OR( + query_module.FilterNode("prop1", "=", 1), + query_module.FilterNode("prop1", "=", 2), + query_module.FilterNode("prop1", "=", 3), + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_get_query_not_in(): + class SomeKind(model.Model): + prop1 = 
model.IntegerProperty() + + gql = gql_module.GQL("SELECT prop1 FROM SomeKind WHERE prop1 NOT IN (1, 2)") + query = gql.get_query() + assert query.filters == query_module.FilterNode("prop1", "not_in", [1, 2]) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_get_query_in_parameterized(): + class SomeKind(model.Model): + prop1 = model.StringProperty() + + gql = gql_module.GQL("SELECT prop1 FROM SomeKind WHERE prop1 IN (:1, :2, :3)") + query = gql.get_query() + assert "'in'," in str(query.filters) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_get_query_not_in_parameterized(): + class SomeKind(model.Model): + prop1 = model.StringProperty() + + gql = gql_module.GQL( + "SELECT prop1 FROM SomeKind WHERE prop1 NOT IN (:1, :2, :3)" + ) + query = gql.get_query() + assert "'not_in'," in str(query.filters) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_get_query_keys_only(): + class SomeKind(model.Model): + prop1 = model.StringProperty() + + gql = gql_module.GQL("SELECT __key__ FROM SomeKind WHERE prop1='a'") + query = gql.get_query() + assert query.keys_only is True + assert "keys_only=True" in query.__repr__() + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_get_query_date(): + class SomeKind(model.Model): + prop1 = model.DateProperty() + + gql = gql_module.GQL( + "SELECT prop1 FROM SomeKind WHERE prop1 = Date(2020, 3, 26)" + ) + query = gql.get_query() + assert query.filters == query_module.FilterNode( + "prop1", "=", datetime.datetime(2020, 3, 26, 0, 0, 0) + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_get_query_date_one_parameter(): + class SomeKind(model.Model): + prop1 = model.DateProperty() + + gql = gql_module.GQL( + "SELECT prop1 FROM SomeKind WHERE prop1 = Date('2020-03-26')" + ) + query = gql.get_query() + assert query.filters == query_module.FilterNode( + "prop1", "=", datetime.datetime(2020, 3, 26, 0, 0, 0) + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_get_query_date_parameterized(): + class SomeKind(model.Model): + prop1 = model.DateProperty() + + gql = gql_module.GQL("SELECT prop1 FROM SomeKind WHERE prop1 = Date(:1)") + query = gql.get_query() + assert "'date'" in str(query.filters) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_get_query_date_one_parameter_bad_date(): + class SomeKind(model.Model): + prop1 = model.DateProperty() + + gql = gql_module.GQL( + "SELECT prop1 FROM SomeKind WHERE prop1 = Date('not a date')" + ) + with pytest.raises(exceptions.BadQueryError): + gql.get_query() + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_get_query_date_one_parameter_bad_type(): + class SomeKind(model.Model): + prop1 = model.DateProperty() + + gql = gql_module.GQL("SELECT prop1 FROM SomeKind WHERE prop1 = Date(42)") + with pytest.raises(exceptions.BadQueryError): + gql.get_query() + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_get_query_date_too_many_values(): + class SomeKind(model.Model): + prop1 = model.DateProperty() + + gql = gql_module.GQL( + "SELECT prop1 FROM SomeKind WHERE prop1 = Date(1, 2, 3, 4)" + ) + with pytest.raises(exceptions.BadQueryError): + gql.get_query() + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_get_query_date_bad_values(): + class SomeKind(model.Model): + prop1 = model.DateProperty() + + gql = gql_module.GQL( + "SELECT prop1 FROM SomeKind WHERE prop1 = Date(100, 200, 300)" + ) + with pytest.raises(exceptions.BadQueryError): + 
gql.get_query() + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_get_query_datetime(): + class SomeKind(model.Model): + prop1 = model.DateTimeProperty() + + gql = gql_module.GQL( + "SELECT prop1 FROM SomeKind WHERE prop1 = DateTime(2020, 3, 26," "12, 45, 5)" + ) + query = gql.get_query() + assert query.filters == query_module.FilterNode( + "prop1", "=", datetime.datetime(2020, 3, 26, 12, 45, 5) + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_get_query_datetime_one_parameter(): + class SomeKind(model.Model): + prop1 = model.DateTimeProperty() + + gql = gql_module.GQL( + "SELECT prop1 FROM SomeKind WHERE prop1 = " "DateTime('2020-03-26 12:45:05')" + ) + query = gql.get_query() + assert query.filters == query_module.FilterNode( + "prop1", "=", datetime.datetime(2020, 3, 26, 12, 45, 5) + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_get_query_datetime_parameterized(): + class SomeKind(model.Model): + prop1 = model.DateTimeProperty() + + gql = gql_module.GQL("SELECT prop1 FROM SomeKind WHERE prop1 = DateTime(:1)") + query = gql.get_query() + assert "'datetime'" in str(query.filters) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_get_query_datetime_one_parameter_bad_date(): + class SomeKind(model.Model): + prop1 = model.DateTimeProperty() + + gql = gql_module.GQL( + "SELECT prop1 FROM SomeKind WHERE prop1 = DateTime('not a date')" + ) + with pytest.raises(exceptions.BadQueryError): + gql.get_query() + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_get_query_datetime_one_parameter_bad_type(): + class SomeKind(model.Model): + prop1 = model.DateTimeProperty() + + gql = gql_module.GQL("SELECT prop1 FROM SomeKind WHERE prop1 = DateTime(42)") + with pytest.raises(exceptions.BadQueryError): + gql.get_query() + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_get_query_datetime_bad_values(): + class SomeKind(model.Model): + prop1 = model.DateTimeProperty() + + gql = gql_module.GQL( + "SELECT prop1 FROM SomeKind WHERE prop1 = DateTime(100, 200, 300)" + ) + with pytest.raises(exceptions.BadQueryError): + gql.get_query() + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_get_query_time(): + class SomeKind(model.Model): + prop1 = model.TimeProperty() + + gql = gql_module.GQL("SELECT prop1 FROM SomeKind WHERE prop1 = Time(12, 45, 5)") + query = gql.get_query() + assert query.filters == query_module.FilterNode( + "prop1", "=", datetime.datetime(1970, 1, 1, 12, 45, 5) + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_get_query_time_one_parameter(): + class SomeKind(model.Model): + prop1 = model.TimeProperty() + + gql = gql_module.GQL( + "SELECT prop1 FROM SomeKind WHERE prop1 = Time('12:45:05')" + ) + query = gql.get_query() + assert query.filters == query_module.FilterNode( + "prop1", "=", datetime.datetime(1970, 1, 1, 12, 45, 5) + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_get_query_time_one_parameter_int(): + class SomeKind(model.Model): + prop1 = model.TimeProperty() + + gql = gql_module.GQL("SELECT prop1 FROM SomeKind WHERE prop1 = Time(12)") + query = gql.get_query() + assert query.filters == query_module.FilterNode( + "prop1", "=", datetime.datetime(1970, 1, 1, 12) + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_get_query_time_parameterized(): + class SomeKind(model.Model): + prop1 = model.TimeProperty() + + gql = gql_module.GQL("SELECT prop1 FROM SomeKind WHERE prop1 = 
Time(:1)") + query = gql.get_query() + assert "'time'" in str(query.filters) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_get_query_time_one_parameter_bad_time(): + class SomeKind(model.Model): + prop1 = model.TimeProperty() + + gql = gql_module.GQL( + "SELECT prop1 FROM SomeKind WHERE prop1 = Time('not a time')" + ) + with pytest.raises(exceptions.BadQueryError): + gql.get_query() + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_get_query_time_one_parameter_bad_type(): + class SomeKind(model.Model): + prop1 = model.TimeProperty() + + gql = gql_module.GQL("SELECT prop1 FROM SomeKind WHERE prop1 = Time(3.141592)") + with pytest.raises(exceptions.BadQueryError): + gql.get_query() + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_get_query_time_too_many_values(): + class SomeKind(model.Model): + prop1 = model.TimeProperty() + + gql = gql_module.GQL( + "SELECT prop1 FROM SomeKind WHERE prop1 = Time(1, 2, 3, 4)" + ) + with pytest.raises(exceptions.BadQueryError): + gql.get_query() + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_get_query_time_bad_values(): + class SomeKind(model.Model): + prop1 = model.TimeProperty() + + gql = gql_module.GQL( + "SELECT prop1 FROM SomeKind WHERE prop1 = Time(100, 200, 300)" + ) + with pytest.raises(exceptions.BadQueryError): + gql.get_query() + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_get_query_geopt(): + class SomeKind(model.Model): + prop1 = model.GeoPtProperty() + + gql = gql_module.GQL( + "SELECT prop1 FROM SomeKind WHERE prop1 = GeoPt(20.67, -100.32)" + ) + query = gql.get_query() + assert query.filters == query_module.FilterNode( + "prop1", "=", model.GeoPt(20.67, -100.32) + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_get_query_geopt_parameterized(): + class SomeKind(model.Model): + prop1 = model.GeoPtProperty() + + gql = gql_module.GQL("SELECT prop1 FROM SomeKind WHERE prop1 = GeoPt(:1)") + query = gql.get_query() + assert "'geopt'" in str(query.filters) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_get_query_geopt_too_many_values(): + class SomeKind(model.Model): + prop1 = model.GeoPtProperty() + + gql = gql_module.GQL( + "SELECT prop1 FROM SomeKind WHERE prop1 = " "GeoPt(20.67,-100.32, 1.5)" + ) + with pytest.raises(exceptions.BadQueryError): + gql.get_query() + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_get_query_key(): + class SomeKind(model.Model): + prop1 = model.KeyProperty() + + gql = gql_module.GQL( + "SELECT prop1 FROM SomeKind WHERE prop1 = Key('parent', 'c', " + "'child', 42)" + ) + query = gql.get_query() + assert query.filters == query_module.FilterNode( + "prop1", "=", key.Key("parent", "c", "child", 42) + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_get_query_key_parameterized(): + class SomeKind(model.Model): + prop1 = model.KeyProperty() + + gql = gql_module.GQL("SELECT prop1 FROM SomeKind WHERE prop1 = Key(:1)") + query = gql.get_query() + assert "'key'" in str(query.filters) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_get_query_key_odd_values(): + class SomeKind(model.Model): + prop1 = model.KeyProperty() + + gql = gql_module.GQL( + "SELECT prop1 FROM SomeKind WHERE prop1 = Key(100, 200, 300)" + ) + with pytest.raises(exceptions.BadQueryError): + gql.get_query() + + +class TestNotImplementedFUNCTIONS: + @staticmethod + def test_user(): + with pytest.raises(NotImplementedError): + 
gql_module.FUNCTIONS["user"]("any arg") + + @staticmethod + def test_nop(): + with pytest.raises(NotImplementedError): + gql_module.FUNCTIONS["nop"]("any arg") diff --git a/packages/google-cloud-ndb/tests/unit/test__legacy_entity_pb.py b/packages/google-cloud-ndb/tests/unit/test__legacy_entity_pb.py new file mode 100644 index 000000000000..3cbf37b58e02 --- /dev/null +++ b/packages/google-cloud-ndb/tests/unit/test__legacy_entity_pb.py @@ -0,0 +1,536 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import array +import pytest + +from google.cloud.ndb import _legacy_entity_pb as entity_module +from google.cloud.ndb import _legacy_protocol_buffer as pb_module + + +def _get_decoder(s): + a = array.array("B") + a.frombytes(s) + d = pb_module.Decoder(a, 0, len(a)) + return d + + +class TestEntityProto: + @staticmethod + def test_constructor(): + entity = entity_module.EntityProto() + assert entity.property_ == [] + + @staticmethod + def test_TryMerge_set_kind(): + entity = entity_module.EntityProto() + d = _get_decoder(b"\x20\x2a") + entity.TryMerge(d) + assert entity.has_kind() + assert entity.kind() == 42 + + @staticmethod + def test_TryMerge_set_kind_uri(): + entity = entity_module.EntityProto() + d = _get_decoder(b"\x2a\x01\x41") + entity.TryMerge(d) + assert entity.has_kind_uri() + assert entity.kind_uri().decode() == "A" + + @staticmethod + def test_TryMerge_mutable_key_app(): + entity = entity_module.EntityProto() + d = _get_decoder(b"\x6a\x03\x6a\x01\x41") + entity.TryMerge(d) + assert entity.key().has_app() + assert entity.key().app.decode() == "A" + + @staticmethod + def test_TryMerge_mutable_key_namespace(): + entity = entity_module.EntityProto() + d = _get_decoder(b"\x6a\x04\xa2\x01\x01\x42") + entity.TryMerge(d) + assert entity.key().has_name_space() + assert entity.key().name_space.decode() == "B" + + @staticmethod + def test_TryMerge_mutable_key_database(): + entity = entity_module.EntityProto() + d = _get_decoder(b"\x6a\x04\xba\x01\x01\x43") + entity.TryMerge(d) + assert entity.key().has_database_id() + assert entity.key().database_id.decode() == "C" + + @staticmethod + def test_TryMerge_mutable_key_path(): + entity = entity_module.EntityProto() + d = _get_decoder(b"\x6a\x0c\x72\x0a\x0b\x12\x01\x44\x18\x01\x22\x01\x45\x0c") + entity.TryMerge(d) + assert entity.has_key() # noqa: W601 + assert entity.key().has_path() + element = entity.key().path.element_list()[0] + assert element.has_type() + # assert element.type.decode() == "D" + assert element.type == "D" + assert element.has_id() + assert element.id == 1 + assert element.has_name() + assert element.name.decode() == "E" + + @staticmethod + def test_TryMerge_mutable_key_path_not_bytes(): + entity = entity_module.EntityProto() + d = _get_decoder(b"\x6a\x0c\x72\x0a\x0b\x12\x01\x44\x18\x01\x22\x01\x45\x0c") + entity.TryMerge(d) + assert entity.has_key() # noqa: W601 + assert entity.key().has_path() + element = entity.key().path.element_list()[0] + assert element.has_type() + assert element.type == "D" 
+ # Not quite sure how this type could be set from a decoder string + element.set_type("E") + assert element.type == "E" + + @staticmethod + def test_TryMerge_mutable_key_path_with_skip_data(): + entity = entity_module.EntityProto() + d = _get_decoder( + b"\x6a\x0f\x72\x0d\x02\x01\x01\x0b\x12\x01\x44\x18\x01\x22\x01" b"\x45\x0c" + ) + entity.TryMerge(d) + assert entity.key().has_path() + + @staticmethod + def test_TryMerge_mutable_key_path_truncated(): + entity = entity_module.EntityProto() + d = _get_decoder(b"\x6a\x03\x72\x01\x00") + with pytest.raises(pb_module.ProtocolBufferDecodeError): + entity.TryMerge(d) + + @staticmethod + def test_TryMerge_mutable_key_path_element_with_skip_data(): + entity = entity_module.EntityProto() + d = _get_decoder( + b"\x6a\x0f\x72\x0d\x0b\x02\x01\x01\x12\x01\x44\x18\x01\x22\x01" b"\x45\x0c" + ) + entity.TryMerge(d) + assert entity.key().has_path() + + @staticmethod + def test_TryMerge_mutable_key_path_element_truncated(): + entity = entity_module.EntityProto() + d = _get_decoder(b"\x6a\x04\x72\x02\x0b\x00") + with pytest.raises(pb_module.ProtocolBufferDecodeError): + entity.TryMerge(d) + + @staticmethod + def test_TryMerge_mutable_key_with_skip_data(): + entity = entity_module.EntityProto() + d = _get_decoder(b"\x6a\x07\x02\x01\x01\xa2\x01\x01\x42") + entity.TryMerge(d) + assert entity.key().has_name_space() + assert entity.key().name_space.decode() == "B" + + @staticmethod + def test_TryMerge_mutable_key_decode_error(): + entity = entity_module.EntityProto() + d = _get_decoder(b"\x6a\x01\x00") + with pytest.raises(pb_module.ProtocolBufferDecodeError): + entity.TryMerge(d) + + @staticmethod + def test_TryMerge_property_meaning(): + entity = entity_module.EntityProto() + d = _get_decoder(b"\x72\x02\x08\x0e") + entity.TryMerge(d) + assert entity.property_list()[0].has_meaning() + meaning = entity.property_list()[0].meaning() + assert meaning == 14 + assert entity.property_list()[0].Meaning_Name(meaning) == "BLOB" + + @staticmethod + def test_TryMerge_property_meaning_uri(): + entity = entity_module.EntityProto() + d = _get_decoder(b"\x72\x03\x12\x01\x41") + entity.TryMerge(d) + assert entity.property_list()[0].has_meaning_uri() + assert entity.property_list()[0].meaning_uri().decode() == "A" + + @staticmethod + def test_TryMerge_property_name(): + entity = entity_module.EntityProto() + d = _get_decoder(b"\x72\x03\x1a\x01\x41") + entity.TryMerge(d) + assert entity.property_list()[0].has_name() + assert entity.property_list()[0].name().decode() == "A" + + @staticmethod + def test_TryMerge_property_multiple(): + entity = entity_module.EntityProto() + d = _get_decoder(b"\x72\x02\x20\x01") + entity.TryMerge(d) + assert entity.property_list()[0].has_multiple() + assert entity.property_list()[0].multiple() + + @staticmethod + def test_TryMerge_property_stashed(): + entity = entity_module.EntityProto() + d = _get_decoder(b"\x72\x02\x30\x02") + entity.TryMerge(d) + assert entity.property_list()[0].has_stashed() + assert entity.property_list()[0].stashed() == 2 + + @staticmethod + def test_TryMerge_property_computed(): + entity = entity_module.EntityProto() + d = _get_decoder(b"\x72\x02\x38\x01") + entity.TryMerge(d) + assert entity.property_list()[0].has_computed() + assert entity.property_list()[0].computed() + + @staticmethod + def test_TryMerge_property_skip_data(): + entity = entity_module.EntityProto() + d = _get_decoder(b"\x72\x05\x38\x01\x02\x01\x01") + entity.TryMerge(d) + assert entity.property_list()[0].has_computed() + + @staticmethod + def 
test_TryMerge_property_truncated(): + entity = entity_module.EntityProto() + d = _get_decoder(b"\x72\x01\x00") + with pytest.raises(pb_module.ProtocolBufferDecodeError): + entity.TryMerge(d) + + @staticmethod + def test_TryMerge_property_string(): + entity = entity_module.EntityProto() + d = _get_decoder(b"\x72\x08\x1a\x01\x46\x2a\x03\x1a\x01\x47") + entity.TryMerge(d) + assert entity.entity_props()["F"].decode() == "G" + + @staticmethod + def test_TryMerge_property_int(): + entity = entity_module.EntityProto() + d = _get_decoder(b"\x72\x07\x1a\x01\x46\x2a\x02\x08\x01") + entity.TryMerge(d) + assert entity.entity_props()["F"] == 1 + + @staticmethod + def test_TryMerge_property_double(): + entity = entity_module.EntityProto() + d = _get_decoder(b"\x72\x0e\x1a\x01\x46\x2a\x09\x21\x00\x00\x00\x00\x00\x00E@") + entity.TryMerge(d) + assert entity.entity_props()["F"] == 42.0 + + @staticmethod + def test_TryMerge_property_boolean(): + entity = entity_module.EntityProto() + d = _get_decoder(b"\x72\x07\x1a\x01\x46\x2a\x02\x10\x01") + entity.TryMerge(d) + assert entity.entity_props()["F"] + + @staticmethod + def test_TryMerge_property_point(): + entity = entity_module.EntityProto() + d = _get_decoder( + b"\x72\x19\x1a\x01\x46\x2a\x14\x2b\x31\x00\x00\x00\x00\x00\x00E@" + b"\x39\x00\x00\x00\x00\x00\x00E@\x2c" + ) + entity.TryMerge(d) + point = entity.entity_props()["F"] + assert point.has_x() + assert point.x() == 42.0 + assert point.has_y() + assert point.y() == 42.0 + + @staticmethod + def test_TryMerge_property_point_skip_data(): + entity = entity_module.EntityProto() + d = _get_decoder( + b"\x72\x1c\x1a\x01\x46\x2a\x17\x2b\x31\x00\x00\x00\x00\x00\x00E@" + b"\x39\x00\x00\x00\x00\x00\x00E@\x02\x01\x01\x2c" + ) + entity.TryMerge(d) + point = entity.entity_props()["F"] + assert point.has_x() + assert point.x() == 42.0 + assert point.has_y() + assert point.y() == 42.0 + + @staticmethod + def test_TryMerge_property_point_truncated(): + entity = entity_module.EntityProto() + d = _get_decoder(b"\x72\x07\x1a\x01\x46\x2a\x02\x2b\x00") + with pytest.raises(pb_module.ProtocolBufferDecodeError): + entity.TryMerge(d) + + @staticmethod + def test_TryMerge_property_reference_app(): + entity = entity_module.EntityProto() + d = _get_decoder(b"\x72\x0a\x1a\x01\x46\x2a\x05\x63\x6a\x01\x41\x64") + entity.TryMerge(d) + assert entity.entity_props()["F"].has_app() + assert entity.entity_props()["F"].app().decode() == "A" + + @staticmethod + def test_TryMerge_property_reference_pathelement(): + entity = entity_module.EntityProto() + d = _get_decoder( + b"\x72\x13\x1a\x01\x46\x2a\x0e\x63\x73\x7a\x01\x42" + b"\x8a\x01\x01\x43\x80\x01\x01\x74\x64" + ) + entity.TryMerge(d) + element = entity.entity_props()["F"].pathelement_list()[0] + assert element.has_type() + assert element.type().decode() == "B" + assert element.has_id() + assert element.id() == 1 + assert element.has_name() + assert element.name().decode() == "C" + + @staticmethod + def test_TryMerge_property_reference_pathelement_skip_data(): + entity = entity_module.EntityProto() + d = _get_decoder( + b"\x72\x16\x1a\x01\x46\x2a\x11\x63\x73\x7a\x01\x42" + b"\x8a\x01\x01\x43\x80\x01\x01\x02\x01\x01\x74\x64" + ) + entity.TryMerge(d) + element = entity.entity_props()["F"].pathelement_list()[0] + assert element.has_type() + assert element.type().decode() == "B" + assert element.has_id() + assert element.id() == 1 + assert element.has_name() + assert element.name().decode() == "C" + + @staticmethod + def test_TryMerge_property_reference_pathelement_truncated(): + entity = 
entity_module.EntityProto() + d = _get_decoder( + b"\x72\x14\x1a\x01\x46\x2a\x0f\x63\x73\x7a\x01\x42" + b"\x8a\x01\x01\x43\x80\x01\x01\x00\x74\x64" + ) + with pytest.raises(pb_module.ProtocolBufferDecodeError): + entity.TryMerge(d) + + @staticmethod + def test_TryMerge_property_reference_name_space(): + entity = entity_module.EntityProto() + d = _get_decoder(b"\x72\x0b\x1a\x01\x46\x2a\x06\x63\xa2\x01\x01\x41" b"\x64") + entity.TryMerge(d) + assert entity.entity_props()["F"].has_name_space() + assert entity.entity_props()["F"].name_space().decode() == "A" + + @staticmethod + def test_TryMerge_property_reference_database_id(): + entity = entity_module.EntityProto() + d = _get_decoder(b"\x72\x0b\x1a\x01\x46\x2a\x06\x63\xba\x01\x01\x41" b"\x64") + entity.TryMerge(d) + assert entity.entity_props()["F"].has_database_id() + assert entity.entity_props()["F"].database_id().decode() == "A" + + @staticmethod + def test_TryMerge_property_reference_skip_data(): + entity = entity_module.EntityProto() + d = _get_decoder( + b"\x72\x0d\x1a\x01\x46\x2a\x08\x63\x02\x01\x01\x6a" b"\x01\x41\x64" + ) + entity.TryMerge(d) + assert entity.entity_props()["F"].has_app() + assert entity.entity_props()["F"].app().decode() == "A" + + @staticmethod + def test_TryMerge_property_reference_truncated(): + entity = entity_module.EntityProto() + d = _get_decoder(b"\x72\x07\x1a\x01\x46\x2a\x02\x63\x00") + with pytest.raises(pb_module.ProtocolBufferDecodeError): + entity.TryMerge(d) + + @staticmethod + def test_TryMerge_property_value_skip_data(): + entity = entity_module.EntityProto() + d = _get_decoder(b"\x72\x0a\x1a\x01\x46\x2a\x05\x02\x01\x01\x10\x01") + entity.TryMerge(d) + assert entity.entity_props()["F"] == 1 + + @staticmethod + def test_TryMerge_property_value_truncated(): + entity = entity_module.EntityProto() + d = _get_decoder(b"\x72\x03\x2a\x01\x00") + with pytest.raises(pb_module.ProtocolBufferDecodeError): + entity.TryMerge(d) + + @staticmethod + def test_TryMerge_raw_property_string(): + entity = entity_module.EntityProto() + d = _get_decoder(b"\x7a\x08\x1a\x01\x46\x2a\x03\x1a\x01\x47") + entity.TryMerge(d) + assert entity.entity_props()["F"].decode() == "G" + + @staticmethod + def test_TryMerge_with_skip_data(): + entity = entity_module.EntityProto() + d = _get_decoder(b"\x02\x01\x01\x7a\x08\x1a\x01\x46\x2a\x03\x1a\x01" b"\x47") + entity.TryMerge(d) + assert entity.entity_props()["F"].decode() == "G" + + @staticmethod + def test_TryMerge_decode_error(): + entity = entity_module.EntityProto() + d = _get_decoder(b"\x00") + with pytest.raises(pb_module.ProtocolBufferDecodeError): + entity.TryMerge(d) + + @staticmethod + def test__get_property_value_empty_property(): + entity = entity_module.EntityProto() + prop = entity_module.PropertyValue() + assert entity._get_property_value(prop) is None + + +class TestDecoder: + @staticmethod + def test_prefixed_string_truncated(): + d = _get_decoder(b"\x10") + with pytest.raises(pb_module.ProtocolBufferDecodeError): + d.getPrefixedString() + + @staticmethod + def test_boolean_corrupted(): + d = _get_decoder(b"\x10") + with pytest.raises(pb_module.ProtocolBufferDecodeError): + d.getBoolean() + + @staticmethod + def test_double_truncated(): + d = _get_decoder(b"\x10") + with pytest.raises(pb_module.ProtocolBufferDecodeError): + d.getDouble() + + @staticmethod + def test_get8_truncated(): + d = _get_decoder(b"") + with pytest.raises(pb_module.ProtocolBufferDecodeError): + d.get8() + + @staticmethod + def test_get16(): + d = _get_decoder(b"\x01\x00") + assert d.get16() == 1 + + 
@staticmethod + def test_get16_truncated(): + d = _get_decoder(b"\x10") + with pytest.raises(pb_module.ProtocolBufferDecodeError): + d.get16() + + @staticmethod + def test_get32(): + d = _get_decoder(b"\x01\x00\x00\x00") + assert d.get32() == 1 + + @staticmethod + def test_getVarInt32_negative(): + d = _get_decoder(b"\xc7\xf5\xff\xff\xff\xff\xff\xff\xff\x01") + assert d.getVarInt32() == -1337 + + @staticmethod + def test_get32_truncated(): + d = _get_decoder(b"\x10") + with pytest.raises(pb_module.ProtocolBufferDecodeError): + d.get32() + + @staticmethod + def test_get64(): + d = _get_decoder(b"\x01\x00\x00\x00\x00\x00\x00\x00") + assert d.get64() == 1 + + @staticmethod + def test_getVarInt64_negative(): + d = _get_decoder(b"\xc7\xf5\xff\xff\xff\xff\xff\xff\xff\x01") + assert d.getVarInt64() == -1337 + + @staticmethod + def test_get64_truncated(): + d = _get_decoder(b"\x10") + with pytest.raises(pb_module.ProtocolBufferDecodeError): + d.get64() + + @staticmethod + def test_skip_truncated(): + d = _get_decoder(b"\x10") + with pytest.raises(pb_module.ProtocolBufferDecodeError): + d.skip(5) + + @staticmethod + def test_skipData_numeric(): + d = _get_decoder(b"\x01") + d.skipData(0) + assert d.idx == 1 + + @staticmethod + def test_skipData_double(): + d = _get_decoder(b"\x01\x00\x00\x00\x00\x00\x00\x00") + d.skipData(1) + assert d.idx == 8 + + @staticmethod + def test_skipData_float(): + d = _get_decoder(b"\x01\x00\x00\x00") + d.skipData(5) + assert d.idx == 4 + + @staticmethod + def test_skipData_startgroup(): + d = _get_decoder(b"\x00\x01\x04") + d.skipData(3) + assert d.idx == 3 + + @staticmethod + def test_skipData_endgroup_no_startgroup(): + d = _get_decoder(b"\x10") + with pytest.raises(pb_module.ProtocolBufferDecodeError): + d.skipData(4) + + @staticmethod + def test_skipData_bad_tag(): + d = _get_decoder(b"\x10") + with pytest.raises(pb_module.ProtocolBufferDecodeError): + d.skipData(7) + + @staticmethod + def test_skipData_startgroup_bad_endgroup(): + d = _get_decoder(b"\x00\x01\x2c") + with pytest.raises(pb_module.ProtocolBufferDecodeError): + d.skipData(3) + + @staticmethod + def test_getVarInt32_too_many_bytes(): + d = _get_decoder(b"\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff") + with pytest.raises(pb_module.ProtocolBufferDecodeError): + d.getVarInt32() + + @staticmethod + def test_getVarInt32_corrupted(): + d = _get_decoder(b"\x81\x81\x81\x81\x81\x81\x81\x71") + with pytest.raises(pb_module.ProtocolBufferDecodeError): + d.getVarInt32() + + @staticmethod + def test_getVarInt64_too_many_bytes(): + d = _get_decoder(b"\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff") + with pytest.raises(pb_module.ProtocolBufferDecodeError): + d.getVarInt64() diff --git a/packages/google-cloud-ndb/tests/unit/test__options.py b/packages/google-cloud-ndb/tests/unit/test__options.py new file mode 100644 index 000000000000..a0d00017c4de --- /dev/null +++ b/packages/google-cloud-ndb/tests/unit/test__options.py @@ -0,0 +1,194 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
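+ +# Orientation for the tests below: Options accepts legacy NDB argument names +# (deadline, use_memcache, memcache_timeout) and maps them onto their modern +# equivalents (timeout, use_global_cache, global_cache_timeout), raising +# TypeError when both spellings of the same option are given.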
+ +import pytest + +from google.cloud.ndb import _datastore_api +from google.cloud.ndb import _options +from google.cloud.ndb import utils + + +class MyOptions(_options.Options): + __slots__ = ["foo", "bar"] + + +class TestOptions: + @staticmethod + def test_constructor_w_bad_arg(): + with pytest.raises(TypeError): + MyOptions(kind="test") + + @staticmethod + def test_constructor_w_deadline(): + options = MyOptions(deadline=20) + assert options.timeout == 20 + + @staticmethod + def test_constructor_w_deadline_and_timeout(): + with pytest.raises(TypeError): + MyOptions(timeout=20, deadline=10) + + @staticmethod + def test_constructor_w_use_memcache(): + options = MyOptions(use_memcache=True) + assert options.use_global_cache is True + + @staticmethod + def test_constructor_w_use_global_cache(): + options = MyOptions(use_global_cache=True) + assert options.use_global_cache is True + + @staticmethod + def test_constructor_w_use_memcache_and_global_cache(): + with pytest.raises(TypeError): + MyOptions(use_global_cache=True, use_memcache=False) + + @staticmethod + def test_constructor_w_use_datastore(): + options = MyOptions(use_datastore=False) + assert options.use_datastore is False + + @staticmethod + def test_constructor_w_use_cache(): + options = MyOptions(use_cache=20) + assert options.use_cache == 20 + + @staticmethod + def test_constructor_w_memcache_timeout(): + options = MyOptions(memcache_timeout=20) + assert options.global_cache_timeout == 20 + + @staticmethod + def test_constructor_w_global_cache_timeout(): + options = MyOptions(global_cache_timeout=20) + assert options.global_cache_timeout == 20 + + @staticmethod + def test_constructor_w_memcache_and_global_cache_timeout(): + with pytest.raises(TypeError): + MyOptions(memcache_timeout=20, global_cache_timeout=20) + + @staticmethod + def test_constructor_w_max_memcache_items(): + with pytest.raises(NotImplementedError): + MyOptions(max_memcache_items=20) + + @staticmethod + def test_constructor_w_force_writes(): + with pytest.raises(NotImplementedError): + MyOptions(force_writes=20) + + @staticmethod + def test_constructor_w_propagation(): + with pytest.raises(NotImplementedError): + MyOptions(propagation=20) + + @staticmethod + def test_constructor_w_xg(): + options = MyOptions(xg=True) + assert options == MyOptions() + + @staticmethod + def test_constructor_with_config(): + config = MyOptions(retries=5, foo="config_test") + options = MyOptions(config=config, retries=8, bar="app") + assert options.retries == 8 + assert options.bar == "app" + assert options.foo == "config_test" + + @staticmethod + def test_constructor_with_bad_config(): + with pytest.raises(TypeError): + MyOptions(config="bad") + + @staticmethod + def test___repr__(): + representation = "MyOptions(foo='test', bar='app')" + options = MyOptions(foo="test", bar="app") + assert options.__repr__() == representation + + @staticmethod + def test__eq__(): + options = MyOptions(foo="test", bar="app") + other = MyOptions(foo="test", bar="app") + otherother = MyOptions(foo="nope", bar="noway") + + assert options == other + assert options != otherother + assert options != "foo" + + @staticmethod + def test_copy(): + options = MyOptions(retries=8, bar="app") + options = options.copy(bar="app2", foo="foo") + assert options.retries == 8 + assert options.bar == "app2" + assert options.foo == "foo" + + @staticmethod + def test_items(): + options = MyOptions(retries=8, bar="app") + items = [(key, value) for key, value in options.items() if value is not None] + assert items == 
[("bar", "app"), ("retries", 8)] + + @staticmethod + def test_options(): + @MyOptions.options + @utils.positional(4) + def hi(mom, foo=None, retries=None, timeout=None, _options=None): + return mom, _options + + assert hi("mom", "bar", 23, timeout=42) == ( + "mom", + MyOptions(foo="bar", retries=23, timeout=42), + ) + + @staticmethod + def test_options_bad_signature(): + @utils.positional(2) + def hi(foo, mom): + pass + + with pytest.raises(TypeError): + MyOptions.options(hi) + + hi("mom", "!") # coverage + + @staticmethod + def test_options_delegated(): + @MyOptions.options + @utils.positional(4) + def hi(mom, foo=None, retries=None, timeout=None, _options=None): + return mom, _options + + options = MyOptions(foo="bar", retries=23, timeout=42) + assert hi("mom", "baz", 24, timeout=43, _options=options) == ( + "mom", + options, + ) + + +class TestReadOptions: + @staticmethod + def test_constructor_w_read_policy(): + options = _options.ReadOptions(read_policy=_datastore_api.EVENTUAL_CONSISTENCY) + assert options == _options.ReadOptions(read_consistency=_datastore_api.EVENTUAL) + + @staticmethod + def test_constructor_w_read_policy_and_read_consistency(): + with pytest.raises(TypeError): + _options.ReadOptions( + read_policy=_datastore_api.EVENTUAL_CONSISTENCY, + read_consistency=_datastore_api.EVENTUAL, + ) diff --git a/packages/google-cloud-ndb/tests/unit/test__remote.py b/packages/google-cloud-ndb/tests/unit/test__remote.py new file mode 100644 index 000000000000..0c0bf19ead5c --- /dev/null +++ b/packages/google-cloud-ndb/tests/unit/test__remote.py @@ -0,0 +1,87 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
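+ +# Orientation for the tests below: RemoteCall wraps a tasklets.Future in a +# grpc.Future-style interface (result, exception, add_done_callback, cancel), +# translating grpc.FutureCancelledError into exceptions.Cancelled.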
+ +from unittest import mock + +import grpc +import pytest + +from google.cloud.ndb import exceptions +from google.cloud.ndb import _remote +from google.cloud.ndb import tasklets + + +class TestRemoteCall: + @staticmethod + def test_constructor(): + future = tasklets.Future() + call = _remote.RemoteCall(future, "info") + assert call.future is future + assert call.info == "info" + + @staticmethod + def test_repr(): + future = tasklets.Future() + call = _remote.RemoteCall(future, "a remote call") + assert repr(call) == "a remote call" + + @staticmethod + def test_exception(): + error = Exception("Spurious error") + future = tasklets.Future() + future.set_exception(error) + call = _remote.RemoteCall(future, "testing") + assert call.exception() is error + + @staticmethod + def test_exception_FutureCancelledError(): + error = grpc.FutureCancelledError() + future = tasklets.Future() + future.exception = mock.Mock(side_effect=error) + call = _remote.RemoteCall(future, "testing") + assert isinstance(call.exception(), exceptions.Cancelled) + + @staticmethod + def test_result(): + future = tasklets.Future() + future.set_result("positive") + call = _remote.RemoteCall(future, "testing") + assert call.result() == "positive" + + @staticmethod + def test_add_done_callback(): + future = tasklets.Future() + call = _remote.RemoteCall(future, "testing") + callback = mock.Mock(spec=()) + call.add_done_callback(callback) + future.set_result(None) + callback.assert_called_once_with(call) + + @staticmethod + def test_add_done_callback_already_done(): + future = tasklets.Future() + future.set_result(None) + call = _remote.RemoteCall(future, "testing") + callback = mock.Mock(spec=()) + call.add_done_callback(callback) + callback.assert_called_once_with(call) + + @staticmethod + def test_cancel(): + future = tasklets.Future() + call = _remote.RemoteCall(future, "testing") + call.cancel() + assert future.cancelled() + with pytest.raises(exceptions.Cancelled): + call.result() diff --git a/packages/google-cloud-ndb/tests/unit/test__retry.py b/packages/google-cloud-ndb/tests/unit/test__retry.py new file mode 100644 index 000000000000..35eddb27959b --- /dev/null +++ b/packages/google-cloud-ndb/tests/unit/test__retry.py @@ -0,0 +1,271 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import itertools + +from unittest import mock + +import pytest + +from google.api_core import exceptions as core_exceptions +from google.cloud.ndb import _retry +from google.cloud.ndb import tasklets + +from . 
import utils + + +def mock_sleep(seconds): + future = tasklets.Future() + future.set_result(None) + return future + + +class Test_retry: + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_success(): + def callback(): + return "foo" + + retry = _retry.retry_async(callback) + assert retry().result() == "foo" + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_nested_retry(): + def callback(): + def nested_callback(): + return "bar" + + nested = _retry.retry_async(nested_callback) + assert nested().result() == "bar" + + return "foo" + + retry = _retry.retry_async(callback) + assert retry().result() == "foo" + + @staticmethod + @mock.patch("google.cloud.ndb.tasklets.sleep", mock_sleep) + @pytest.mark.usefixtures("in_context") + def test_nested_retry_with_exception(): + error = Exception("Fail") + + def callback(): + def nested_callback(): + raise error + + nested = _retry.retry_async(nested_callback, retries=1) + return nested() + + with pytest.raises(core_exceptions.RetryError): + retry = _retry.retry_async(callback, retries=1) + retry().result() + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_success_callback_is_tasklet(): + tasklet_future = tasklets.Future() + + @tasklets.tasklet + def callback(): + result = yield tasklet_future + raise tasklets.Return(result) + + retry = _retry.retry_async(callback) + tasklet_future.set_result("foo") + assert retry().result() == "foo" + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_unhandled_error(): + error = Exception("Spurious error") + + def callback(): + raise error + + retry = _retry.retry_async(callback) + assert retry().exception() is error + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_api_core_unknown(): + def callback(): + raise core_exceptions.Unknown("Unknown") + + with pytest.raises(core_exceptions.RetryError) as e: + retry = _retry.retry_async(callback, retries=1) + retry().result() + + assert e.value.cause == "google.api_core.exceptions.Unknown" + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb.tasklets.sleep") + @mock.patch("google.cloud.ndb._retry.core_retry") + def test_transient_error(core_retry, sleep): + core_retry.exponential_sleep_generator.return_value = itertools.count() + core_retry.if_transient_error.return_value = True + + sleep_future = tasklets.Future("sleep") + sleep.return_value = sleep_future + + callback = mock.Mock(side_effect=[Exception("Spurious error."), "foo"]) + retry = _retry.retry_async(callback) + sleep_future.set_result(None) + assert retry().result() == "foo" + + sleep.assert_called_once_with(0) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb.tasklets.sleep") + @mock.patch("google.cloud.ndb._retry.core_retry") + def test_transient_error_callback_is_tasklet(core_retry, sleep): + """Regression test for #519 + + https://github.com/googleapis/python-ndb/issues/519 + """ + core_retry.exponential_sleep_generator.return_value = itertools.count() + core_retry.if_transient_error.return_value = True + + sleep_future = tasklets.Future("sleep") + sleep.return_value = sleep_future + + callback = mock.Mock( + side_effect=[ + utils.future_exception(Exception("Spurious error.")), + utils.future_result("foo"), + ] + ) + retry = _retry.retry_async(callback) + future = retry() + + # This is the important check for the bug in #519. We need to make sure + # that we're waiting for the sleep future to complete before moving on. 
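+ # future.running() is True only while no result or exception has been set, + # so the retry tasklet must still be parked on the mocked sleep here.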
+ assert future.running() + + # Finish sleeping + sleep_future.set_result(None) + assert future.result() == "foo" + + sleep.assert_called_once_with(0) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb.tasklets.sleep") + @mock.patch("google.cloud.ndb._retry.core_retry") + def test_too_many_transient_errors(core_retry, sleep): + core_retry.exponential_sleep_generator.return_value = itertools.count() + core_retry.if_transient_error.return_value = True + + sleep_future = tasklets.Future("sleep") + sleep.return_value = sleep_future + sleep_future.set_result(None) + + error = Exception("Spurious error") + + def callback(): + raise error + + retry = _retry.retry_async(callback) + with pytest.raises(core_exceptions.RetryError) as error_context: + retry().check_success() + + assert error_context.value.cause is error + assert sleep.call_count == 4 + assert sleep.call_args[0][0] == 3 + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb.tasklets.sleep") + @mock.patch("google.cloud.ndb._retry.core_retry") + def test_too_many_transient_errors_pass_retries(core_retry, sleep): + core_retry.exponential_sleep_generator.return_value = itertools.count() + core_retry.if_transient_error.return_value = True + + sleep_future = tasklets.Future("sleep") + sleep.return_value = sleep_future + sleep_future.set_result(None) + + error = Exception("Spurious error") + + def callback(): + raise error + + retry = _retry.retry_async(callback, retries=4) + with pytest.raises(core_exceptions.RetryError) as error_context: + retry().check_success() + + assert error_context.value.cause is error + assert sleep.call_count == 5 + assert sleep.call_args[0][0] == 4 + + +class Test_is_transient_error: + @staticmethod + @mock.patch("google.cloud.ndb._retry.core_retry") + def test_core_says_yes(core_retry): + error = object() + core_retry.if_transient_error.return_value = True + assert _retry.is_transient_error(error) is True + core_retry.if_transient_error.assert_called_once_with(error) + + @staticmethod + @mock.patch("google.cloud.ndb._retry.core_retry") + def test_error_is_not_transient(core_retry): + error = Exception("whatever") + core_retry.if_transient_error.return_value = False + assert _retry.is_transient_error(error) is False + core_retry.if_transient_error.assert_called_once_with(error) + + @staticmethod + @mock.patch("google.cloud.ndb._retry.core_retry") + def test_unavailable(core_retry): + error = core_exceptions.ServiceUnavailable("testing") + core_retry.if_transient_error.return_value = False + assert _retry.is_transient_error(error) is True + core_retry.if_transient_error.assert_called_once_with(error) + + @staticmethod + @mock.patch("google.cloud.ndb._retry.core_retry") + def test_internal(core_retry): + error = core_exceptions.InternalServerError("testing") + core_retry.if_transient_error.return_value = False + assert _retry.is_transient_error(error) is True + core_retry.if_transient_error.assert_called_once_with(error) + + @staticmethod + @mock.patch("google.cloud.ndb._retry.core_retry") + def test_unauthenticated(core_retry): + error = core_exceptions.Unauthenticated("testing") + core_retry.if_transient_error.return_value = False + assert _retry.is_transient_error(error) is False + core_retry.if_transient_error.assert_called_once_with(error) + + @staticmethod + @mock.patch("google.cloud.ndb._retry.core_retry") + def test_aborted(core_retry): + error = core_exceptions.Aborted("testing") + core_retry.if_transient_error.return_value = False + 
assert _retry.is_transient_error(error) is True + core_retry.if_transient_error.assert_called_once_with(error) + + @staticmethod + @mock.patch("google.cloud.ndb._retry.core_retry") + def test_unknown(core_retry): + error = core_exceptions.Unknown("testing") + core_retry.if_transient_error.return_value = False + assert _retry.is_transient_error(error) is True + core_retry.if_transient_error.assert_called_once_with(error) diff --git a/packages/google-cloud-ndb/tests/unit/test__transaction.py b/packages/google-cloud-ndb/tests/unit/test__transaction.py new file mode 100644 index 000000000000..c18590edca22 --- /dev/null +++ b/packages/google-cloud-ndb/tests/unit/test__transaction.py @@ -0,0 +1,749 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import itertools +import logging + +from unittest import mock + +import pytest + +from google.api_core import exceptions as core_exceptions +from google.cloud.ndb import context as context_module +from google.cloud.ndb import exceptions +from google.cloud.ndb import tasklets +from google.cloud.ndb import _transaction + + +class Test_in_transaction: + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_false(): + assert _transaction.in_transaction() is False + + @staticmethod + def test_true(in_context): + with in_context.new(transaction=b"tx123").use(): + assert _transaction.in_transaction() is True + + +class Test_transaction: + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_propagation_nested(): + with pytest.raises(exceptions.BadRequestError): + _transaction.transaction( + None, propagation=context_module.TransactionOptions.NESTED + ) + + @staticmethod + def test_already_in_transaction(in_context): + with in_context.new(transaction=b"tx123").use(): + with pytest.raises(NotImplementedError): + _transaction.transaction(None) + + @staticmethod + def test_transaction_inherits_and_merges_cache(in_context): + original_cache = in_context.cache + in_context.cache["test"] = "original value" + with in_context.new(transaction=b"tx123").use() as new_context: + assert new_context.cache is not original_cache + assert new_context.cache["test"] == original_cache["test"] + new_context.cache["test"] = "new_value" + assert new_context.cache["test"] != original_cache["test"] + assert in_context.cache["test"] == "new_value" + + @staticmethod + @mock.patch("google.cloud.ndb._transaction.transaction_async") + def test_success(transaction_async): + transaction_async.return_value.result.return_value = 42 + assert _transaction.transaction("callback") == 42 + transaction_async.assert_called_once_with( + "callback", + read_only=False, + retries=3, + join=False, + xg=True, + propagation=None, + ) + + +class Test_transaction_async: + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_api") + def test_success(_datastore_api): + context_module.get_context().cache["foo"] = "bar" + + def callback(): + # The transaction uses its own in-memory cache, which should be empty in + 
# the transaction context and not include the key set above. + context = context_module.get_context() + assert not context.cache + + return "I tried, momma." + + begin_future = tasklets.Future("begin transaction") + _datastore_api.begin_transaction.return_value = begin_future + + commit_future = tasklets.Future("commit transaction") + _datastore_api.commit.return_value = commit_future + + future = _transaction.transaction_async(callback) + + _datastore_api.begin_transaction.assert_called_once_with(False, retries=0) + begin_future.set_result(b"tx123") + + _datastore_api.commit.assert_called_once_with(b"tx123", retries=0) + commit_future.set_result(None) + + assert future.result() == "I tried, momma." + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_api") + def test_success_w_callbacks(_datastore_api): + context_module.get_context().cache["foo"] = "bar" + on_commit_callback = mock.Mock() + transaction_complete_callback = mock.Mock() + + def callback(): + # The transaction uses its own in-memory cache, which should be empty in + # the transaction context and not include the key set above. + context = context_module.get_context() + assert not context.cache + + context.call_on_commit(on_commit_callback) + context.call_on_transaction_complete(transaction_complete_callback) + return "I tried, momma." + + begin_future = tasklets.Future("begin transaction") + _datastore_api.begin_transaction.return_value = begin_future + + commit_future = tasklets.Future("commit transaction") + _datastore_api.commit.return_value = commit_future + + future = _transaction.transaction_async(callback) + + _datastore_api.begin_transaction.assert_called_once_with(False, retries=0) + begin_future.set_result(b"tx123") + + _datastore_api.commit.assert_called_once_with(b"tx123", retries=0) + commit_future.set_result(None) + + assert future.result() == "I tried, momma." + on_commit_callback.assert_called_once_with() + transaction_complete_callback.assert_called_once_with() + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_api") + def test_failure_w_callbacks(_datastore_api): + class SpuriousError(Exception): + pass + + context_module.get_context().cache["foo"] = "bar" + on_commit_callback = mock.Mock() + transaction_complete_callback = mock.Mock() + + def callback(): + context = context_module.get_context() + assert not context.cache + context.call_on_commit(on_commit_callback) + context.call_on_transaction_complete(transaction_complete_callback) + raise SpuriousError() + + begin_future = tasklets.Future("begin transaction") + _datastore_api.begin_transaction.return_value = begin_future + + rollback_future = tasklets.Future("rollback transaction") + _datastore_api.rollback.return_value = rollback_future + + future = _transaction.transaction_async(callback) + + _datastore_api.begin_transaction.assert_called_once_with(False, retries=0) + begin_future.set_result(b"tx123") + + _datastore_api.commit.assert_not_called() + _datastore_api.rollback.assert_called_once_with(b"tx123") + rollback_future.set_result(None) + + with pytest.raises(SpuriousError): + future.result() + + on_commit_callback.assert_not_called() + transaction_complete_callback.assert_called_once_with() + + @staticmethod + def test_success_join(in_context): + def callback(): + return "I tried, momma." + + with in_context.new(transaction=b"tx123").use(): + future = _transaction.transaction_async(callback, join=True) + + assert future.result() == "I tried, momma." 
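+ + # NOTE: with join=True and a transaction already in progress, the callback + # runs inside the existing transaction, which is why these join tests need + # no begin_transaction/commit mocks.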
+ + @staticmethod + def test_success_join_callback_returns_future(in_context): + future = tasklets.Future() + + def callback(): + return future + + with in_context.new(transaction=b"tx123").use(): + future = _transaction.transaction_async(callback, join=True) + + future.set_result("I tried, momma.") + assert future.result() == "I tried, momma." + + @staticmethod + def test_success_propagation_mandatory(in_context): + def callback(): + return "I tried, momma." + + with mock.patch( + "google.cloud.ndb._transaction.transaction_async_", + side_effect=_transaction.transaction_async_, + ) as transaction_async_: + with in_context.new(transaction=b"tx123").use(): + future = _transaction.transaction_async( + callback, + join=False, + propagation=context_module.TransactionOptions.MANDATORY, + ) + + assert future.result() == "I tried, momma." + + transaction_async_.assert_called_once_with( + callback, + 3, + False, + True, + True, + None, + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_failure_propagation_mandatory(): + with pytest.raises(exceptions.BadRequestError): + _transaction.transaction_async( + None, + join=False, + propagation=context_module.TransactionOptions.MANDATORY, + ) + + @staticmethod + def test_invalid_propagation(): + with pytest.raises(ValueError): + _transaction.transaction_async( + None, + propagation=99, + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_invalid_join(caplog, in_context): + def callback(): + return "I tried, momma." + + provided_join_arg = False + + with mock.patch( + "google.cloud.ndb._transaction.transaction_async_", + side_effect=_transaction.transaction_async_, + ) as transaction_async_: + with in_context.new(transaction=b"tx123").use(): + with caplog.at_level(logging.WARNING): + future = _transaction.transaction_async( + callback, + join=provided_join_arg, + propagation=context_module.TransactionOptions.MANDATORY, + ) + + assert future.result() == "I tried, momma." + assert "Modifying join behaviour to maintain old NDB behaviour" in caplog.text + + transaction_async_.assert_called_once_with( + callback, + 3, + False, + True, + (not provided_join_arg), + None, + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_propagation_with_no_join_arg(caplog): + with caplog.at_level(logging.WARNING): + ctx, join = _transaction._Propagation( + context_module.TransactionOptions.ALLOWED + ).handle_propagation() + assert ( + "Modifying join behaviour to maintain old NDB behaviour" not in caplog.text + ) + assert ctx is None + assert join + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_failure_propagation(): + with pytest.raises(exceptions.NoLongerImplementedError): + _transaction.transaction_async_( + None, + propagation=context_module.TransactionOptions.ALLOWED, + ) + + @staticmethod + def test_propagation_allowed_already_in_transaction(in_context): + def callback(): + return "I tried, momma." + + with mock.patch( + "google.cloud.ndb._transaction.transaction_async_", + side_effect=_transaction.transaction_async_, + ) as transaction_async_: + with in_context.new(transaction=b"tx123").use(): + future = _transaction.transaction_async( + callback, + join=False, + propagation=context_module.TransactionOptions.ALLOWED, + ) + + assert future.result() == "I tried, momma." 
+ + transaction_async_.assert_called_once_with( + callback, + 3, + False, + True, + True, + None, + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_api") + def test_propagation_allowed_not_yet_in_transaction(_datastore_api): + def callback(): + return "I tried, momma." + + begin_future = tasklets.Future("begin transaction") + _datastore_api.begin_transaction.return_value = begin_future + + commit_future = tasklets.Future("commit transaction") + _datastore_api.commit.return_value = commit_future + + with mock.patch( + "google.cloud.ndb._transaction.transaction_async_", + side_effect=_transaction.transaction_async_, + ) as transaction_async_: + future = _transaction.transaction_async( + callback, + join=False, + propagation=context_module.TransactionOptions.ALLOWED, + ) + + _datastore_api.begin_transaction.assert_called_once_with(False, retries=0) + begin_future.set_result(b"tx123") + + _datastore_api.commit.assert_called_once_with(b"tx123", retries=0) + commit_future.set_result(None) + + assert future.result() == "I tried, momma." + + transaction_async_.assert_called_once_with( + callback, + 3, + False, + True, + True, + None, + ) + + @staticmethod + @mock.patch("google.cloud.ndb._datastore_api") + def test_propagation_independent_already_in_transaction(_datastore_api, in_context): + def callback(): + return "I tried, momma." + + begin_future = tasklets.Future("begin transaction") + _datastore_api.begin_transaction.return_value = begin_future + + commit_future = tasklets.Future("commit transaction") + _datastore_api.commit.return_value = commit_future + + with mock.patch( + "google.cloud.ndb._transaction.transaction_async_", + side_effect=_transaction.transaction_async_, + ) as transaction_async_: + with in_context.new(transaction=b"tx123").use(): + future = _transaction.transaction_async( + callback, + join=True, + propagation=context_module.TransactionOptions.INDEPENDENT, + ) + + _datastore_api.begin_transaction.assert_called_once_with(False, retries=0) + begin_future.set_result(b"tx456") + + _datastore_api.commit.assert_called_once_with(b"tx456", retries=0) + commit_future.set_result(None) + + assert future.result() == "I tried, momma." + + transaction_async_.assert_called_once_with( + callback, + 3, + False, + False, + True, + None, + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_api") + def test_propagation_independent_not_yet_in_transaction(_datastore_api): + def callback(): + return "I tried, momma." + + begin_future = tasklets.Future("begin transaction") + _datastore_api.begin_transaction.return_value = begin_future + + commit_future = tasklets.Future("commit transaction") + _datastore_api.commit.return_value = commit_future + + with mock.patch( + "google.cloud.ndb._transaction.transaction_async_", + side_effect=_transaction.transaction_async_, + ) as transaction_async_: + future = _transaction.transaction_async( + callback, + join=False, + propagation=context_module.TransactionOptions.INDEPENDENT, + ) + + _datastore_api.begin_transaction.assert_called_once_with(False, retries=0) + begin_future.set_result(b"tx123") + + _datastore_api.commit.assert_called_once_with(b"tx123", retries=0) + commit_future.set_result(None) + + assert future.result() == "I tried, momma." 
+ + transaction_async_.assert_called_once_with( + callback, + 3, + False, + False, + True, + None, + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_api") + def test_success_no_retries(_datastore_api): + def callback(): + return "I tried, momma." + + begin_future = tasklets.Future("begin transaction") + _datastore_api.begin_transaction.return_value = begin_future + + commit_future = tasklets.Future("commit transaction") + _datastore_api.commit.return_value = commit_future + + future = _transaction.transaction_async(callback, retries=0) + + _datastore_api.begin_transaction.assert_called_once_with(False, retries=0) + begin_future.set_result(b"tx123") + + _datastore_api.commit.assert_called_once_with(b"tx123", retries=0) + commit_future.set_result(None) + + assert future.result() == "I tried, momma." + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_api") + def test_success_callback_is_tasklet(_datastore_api): + tasklet = tasklets.Future("tasklet") + + def callback(): + return tasklet + + begin_future = tasklets.Future("begin transaction") + _datastore_api.begin_transaction.return_value = begin_future + + commit_future = tasklets.Future("commit transaction") + _datastore_api.commit.return_value = commit_future + + future = _transaction.transaction_async(callback) + + _datastore_api.begin_transaction.assert_called_once_with(False, retries=0) + begin_future.set_result(b"tx123") + + tasklet.set_result("I tried, momma.") + + _datastore_api.commit.assert_called_once_with(b"tx123", retries=0) + commit_future.set_result(None) + + assert future.result() == "I tried, momma." + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_api") + def test_run_inner_loop(_datastore_api): + begin_futures = [ + tasklets.Future("begin transaction 1"), + tasklets.Future("begin transaction 2"), + ] + _datastore_api.begin_transaction.side_effect = begin_futures + + commit_futures = [ + tasklets.Future("commit transaction 1"), + tasklets.Future("commit transaction 2"), + ] + _datastore_api.commit.side_effect = commit_futures + + @tasklets.tasklet + def callback(): + # Scheduling the sleep call here causes control to go back up to + # the main loop before this tasklet, running in the transaction + # loop, has finished, forcing a call to run_inner_loop via the idle + # handler. + yield tasklets.sleep(0) + + @tasklets.tasklet + def some_tasklet(): + # This tasklet runs in the main loop. In order to get results back + # from the transaction_async calls, the run_inner_loop idle handler + # will have to be run. + yield [ + _transaction.transaction_async(callback), + _transaction.transaction_async(callback), + ] + + # Scheduling this sleep call forces the run_inner_loop idle handler + # to be run again so we can run it in the case when there is no + # more work to be done in the transaction. (Branch coverage.) + yield tasklets.sleep(0) + + raise tasklets.Return("I tried, momma.") + + future = some_tasklet() + + begin_futures[0].set_result(b"tx123") + begin_futures[1].set_result(b"tx234") + commit_futures[0].set_result(None) + commit_futures[1].set_result(None) + + assert future.result() == "I tried, momma." 
+ + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_api") + def test_error(_datastore_api): + error = Exception("Spurious error.") + + def callback(): + raise error + + begin_future = tasklets.Future("begin transaction") + _datastore_api.begin_transaction.return_value = begin_future + + rollback_future = tasklets.Future("rollback transaction") + _datastore_api.rollback.return_value = rollback_future + + future = _transaction.transaction_async(callback) + + _datastore_api.begin_transaction.assert_called_once_with(False, retries=0) + begin_future.set_result(b"tx123") + + _datastore_api.rollback.assert_called_once_with(b"tx123") + rollback_future.set_result(None) + + assert future.exception() is error + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb.tasklets.sleep") + @mock.patch("google.cloud.ndb._retry.core_retry") + @mock.patch("google.cloud.ndb._datastore_api") + def test_transient_error(_datastore_api, core_retry, sleep): + core_retry.exponential_sleep_generator.return_value = itertools.count() + core_retry.if_transient_error.return_value = True + + callback = mock.Mock(side_effect=[Exception("Spurious error."), "foo"]) + + begin_future = tasklets.Future("begin transaction") + begin_future.set_result(b"tx123") + _datastore_api.begin_transaction.return_value = begin_future + + rollback_future = tasklets.Future("rollback transaction") + _datastore_api.rollback.return_value = rollback_future + rollback_future.set_result(None) + + commit_future = tasklets.Future("commit transaction") + _datastore_api.commit.return_value = commit_future + commit_future.set_result(None) + + sleep_future = tasklets.Future("sleep") + sleep_future.set_result(None) + sleep.return_value = sleep_future + + future = _transaction.transaction_async(callback) + assert future.result() == "foo" + + assert _datastore_api.begin_transaction.call_count == 2 + _datastore_api.rollback.assert_called_once_with(b"tx123") + sleep.assert_called_once_with(0) + _datastore_api.commit.assert_called_once_with(b"tx123", retries=0) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb.tasklets.sleep") + @mock.patch("google.cloud.ndb._retry.core_retry") + @mock.patch("google.cloud.ndb._datastore_api") + def test_too_many_transient_errors(_datastore_api, core_retry, sleep): + core_retry.exponential_sleep_generator.return_value = itertools.count() + core_retry.if_transient_error.return_value = True + + error = Exception("Spurious error.") + + def callback(): + raise error + + begin_future = tasklets.Future("begin transaction") + begin_future.set_result(b"tx123") + _datastore_api.begin_transaction.return_value = begin_future + + rollback_future = tasklets.Future("rollback transaction") + _datastore_api.rollback.return_value = rollback_future + rollback_future.set_result(None) + + commit_future = tasklets.Future("commit transaction") + _datastore_api.commit.return_value = commit_future + commit_future.set_result(None) + + sleep_future = tasklets.Future("sleep") + sleep_future.set_result(None) + sleep.return_value = sleep_future + + future = _transaction.transaction_async(callback) + with pytest.raises(core_exceptions.RetryError) as error_context: + future.check_success() + + assert error_context.value.cause is error + + assert _datastore_api.begin_transaction.call_count == 4 + assert _datastore_api.rollback.call_count == 4 + assert sleep.call_count == 4 + _datastore_api.commit.assert_not_called() + +
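+# Illustrative usage sketch, assuming the public ndb aliases (ndb.transactional +# and friends) that re-export the private decorators tested below: +# +# @ndb.transactional(retries=3) +# def transfer(amount): +# ... # body runs inside a Datastore transaction +# +# The tests that follow drive the private _transaction implementations directly.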
+@pytest.mark.usefixtures("in_context") +@mock.patch("google.cloud.ndb._datastore_api") +def test_transactional(_datastore_api): + @_transaction.transactional() + def simple_function(a, b): + return a + b + + begin_future = tasklets.Future("begin transaction") + _datastore_api.begin_transaction.return_value = begin_future + + commit_future = tasklets.Future("commit transaction") + _datastore_api.commit.return_value = commit_future + + begin_future.set_result(b"tx123") + commit_future.set_result(None) + + res = simple_function(100, 42) + assert res == 142 + + +@pytest.mark.usefixtures("in_context") +@mock.patch("google.cloud.ndb._datastore_api") +def test_transactional_async(_datastore_api): + @_transaction.transactional_async() + def simple_function(a, b): + return a + b + + begin_future = tasklets.Future("begin transaction") + _datastore_api.begin_transaction.return_value = begin_future + + commit_future = tasklets.Future("commit transaction") + _datastore_api.commit.return_value = commit_future + + begin_future.set_result(b"tx123") + commit_future.set_result(None) + + res = simple_function(100, 42) + assert res.result() == 142 + + +@pytest.mark.usefixtures("in_context") +@mock.patch("google.cloud.ndb._datastore_api") +def test_transactional_tasklet(_datastore_api): + @_transaction.transactional_tasklet() + def generator_function(dependency): + value = yield dependency + raise tasklets.Return(value + 42) + + begin_future = tasklets.Future("begin transaction") + _datastore_api.begin_transaction.return_value = begin_future + + commit_future = tasklets.Future("commit transaction") + _datastore_api.commit.return_value = commit_future + + begin_future.set_result(b"tx123") + commit_future.set_result(None) + + dependency = tasklets.Future() + dependency.set_result(100) + + res = generator_function(dependency) + assert res.result() == 142 + + +@pytest.mark.usefixtures("in_context") +def test_non_transactional_out_of_transaction(): + @_transaction.non_transactional() + def simple_function(a, b): + return a + b + + res = simple_function(100, 42) + assert res == 142 + + +@pytest.mark.usefixtures("in_context") +def test_non_transactional_in_transaction(in_context): + with in_context.new(transaction=b"tx123").use(): + + def simple_function(a, b): + return a + b + + wrapped_function = _transaction.non_transactional()(simple_function) + + res = wrapped_function(100, 42) + assert res == 142 + + with pytest.raises(exceptions.BadRequestError): + wrapped_function = _transaction.non_transactional(allow_existing=False)( + simple_function + ) + wrapped_function(100, 42) diff --git a/packages/google-cloud-ndb/tests/unit/test_blobstore.py b/packages/google-cloud-ndb/tests/unit/test_blobstore.py new file mode 100644 index 000000000000..7a75c83a6e8e --- /dev/null +++ b/packages/google-cloud-ndb/tests/unit/test_blobstore.py @@ -0,0 +1,181 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
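+ +# As the tests below verify, nearly every blobstore entry point raises +# NotImplementedError; only BlobKey and BlobKeyProperty are live aliases to +# their _datastore_types/model counterparts.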
+ +import pytest + +from google.cloud.ndb import _datastore_types +from google.cloud.ndb import blobstore +from google.cloud.ndb import model + +from . import utils + + +def test___all__(): + utils.verify___all__(blobstore) + + +def test_BlobKey(): + assert blobstore.BlobKey is _datastore_types.BlobKey + + +def test_BlobKeyProperty(): + assert blobstore.BlobKeyProperty is model.BlobKeyProperty + + +class TestBlobFetchSizeTooLargeError: + @staticmethod + def test_constructor(): + with pytest.raises(NotImplementedError): + blobstore.BlobFetchSizeTooLargeError() + + +class TestBlobInfo: + @staticmethod + def test_constructor(): + with pytest.raises(NotImplementedError): + blobstore.BlobInfo() + + @staticmethod + def test_get(): + with pytest.raises(NotImplementedError): + blobstore.BlobInfo.get() + + @staticmethod + def test_get_async(): + with pytest.raises(NotImplementedError): + blobstore.BlobInfo.get_async() + + @staticmethod + def test_get_multi(): + with pytest.raises(NotImplementedError): + blobstore.BlobInfo.get_multi() + + @staticmethod + def test_get_multi_async(): + with pytest.raises(NotImplementedError): + blobstore.BlobInfo.get_multi_async() + + +class TestBlobInfoParseError: + @staticmethod + def test_constructor(): + with pytest.raises(NotImplementedError): + blobstore.BlobInfoParseError() + + +class TestBlobNotFoundError: + @staticmethod + def test_constructor(): + with pytest.raises(NotImplementedError): + blobstore.BlobNotFoundError() + + +class TestBlobReader: + @staticmethod + def test_constructor(): + with pytest.raises(NotImplementedError): + blobstore.BlobReader() + + +def test_create_upload_url(): + with pytest.raises(NotImplementedError): + blobstore.create_upload_url() + + +def test_create_upload_url_async(): + with pytest.raises(NotImplementedError): + blobstore.create_upload_url_async() + + +class TestDataIndexOutOfRangeError: + @staticmethod + def test_constructor(): + with pytest.raises(NotImplementedError): + blobstore.DataIndexOutOfRangeError() + + +def test_delete(): + with pytest.raises(NotImplementedError): + blobstore.delete() + + +def test_delete_async(): + with pytest.raises(NotImplementedError): + blobstore.delete_async() + + +def test_delete_multi(): + with pytest.raises(NotImplementedError): + blobstore.delete_multi() + + +def test_delete_multi_async(): + with pytest.raises(NotImplementedError): + blobstore.delete_multi_async() + + +class TestError: + @staticmethod + def test_constructor(): + with pytest.raises(NotImplementedError): + blobstore.Error() + + +def test_fetch_data(): + with pytest.raises(NotImplementedError): + blobstore.fetch_data() + + +def test_fetch_data_async(): + with pytest.raises(NotImplementedError): + blobstore.fetch_data_async() + + +def test_get(): + # NOTE: `is` identity doesn't work for class methods + assert blobstore.get == blobstore.BlobInfo.get + + +def test_get_async(): + # NOTE: `is` identity doesn't work for class methods + assert blobstore.get_async == blobstore.BlobInfo.get_async + + +def test_get_multi(): + # NOTE: `is` identity doesn't work for class methods + assert blobstore.get_multi == blobstore.BlobInfo.get_multi + + +def test_get_multi_async(): + # NOTE: `is` identity doesn't work for class methods + assert blobstore.get_multi_async == blobstore.BlobInfo.get_multi_async + + +class TestInternalError: + @staticmethod + def test_constructor(): + with pytest.raises(NotImplementedError): + blobstore.InternalError() + + +def test_parse_blob_info(): + with pytest.raises(NotImplementedError): + 
blobstore.parse_blob_info() + + +class TestPermissionDeniedError: + @staticmethod + def test_constructor(): + with pytest.raises(NotImplementedError): + blobstore.PermissionDeniedError() diff --git a/packages/google-cloud-ndb/tests/unit/test_client.py b/packages/google-cloud-ndb/tests/unit/test_client.py new file mode 100644 index 000000000000..0f7019fc115e --- /dev/null +++ b/packages/google-cloud-ndb/tests/unit/test_client.py @@ -0,0 +1,184 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import contextlib +import pytest + +from unittest import mock + +from google.auth import credentials +from google.api_core.client_options import ClientOptions +from google.cloud import environment_vars +from google.cloud.datastore import _http + +from google.cloud.ndb import client as client_module +from google.cloud.ndb import context as context_module +from google.cloud.ndb import _eventloop + + +@contextlib.contextmanager +def patch_credentials(project): + creds = mock.Mock(spec=credentials.Credentials) + patch = mock.patch("google.auth.default", return_value=(creds, project)) + with patch: + yield creds + + +class TestClient: + @staticmethod + def test_constructor_no_args(): + patch_environ = mock.patch.dict( + "google.cloud.ndb.client.os.environ", {}, clear=True + ) + with patch_environ: + with patch_credentials("testing"): + client = client_module.Client() + assert client.SCOPE == ("https://www.googleapis.com/auth/datastore",) + assert client.host == _http.DATASTORE_API_HOST + assert client.project == "testing" + assert client.database is None + assert client.namespace is None + assert client.secure is True + + @staticmethod + def test_constructor_no_args_emulator(): + patch_environ = mock.patch.dict( + "google.cloud.ndb.client.os.environ", + {"DATASTORE_EMULATOR_HOST": "foo"}, + ) + with patch_environ: + with patch_credentials("testing"): + client = client_module.Client() + assert client.SCOPE == ("https://www.googleapis.com/auth/datastore",) + assert client.host == "foo" + assert client.project == "testing" + assert client.database is None + assert client.namespace is None + assert client.secure is False + + @staticmethod + def test_constructor_get_project_from_environ(environ): + environ[environment_vars.GCD_DATASET] = "gcd-project" + with patch_credentials(None): + client = client_module.Client() + assert client.project == "gcd-project" + + @staticmethod + def test_constructor_all_args(): + with patch_credentials("testing") as creds: + client = client_module.Client( + project="test-project", + database="test-database", + namespace="test-namespace", + credentials=creds, + client_options=ClientOptions( + api_endpoint="alternate-endpoint.example.com" + ), + ) + assert client.project == "test-project" + assert client.database == "test-database" + assert client.namespace == "test-namespace" + assert client.host == "alternate-endpoint.example.com" + assert client.secure is True + + @staticmethod + def test_constructor_client_options_as_dict(): + with 
patch_credentials("testing") as creds: + client = client_module.Client( + project="test-project", + database="test-database", + namespace="test-namespace", + credentials=creds, + client_options={"api_endpoint": "alternate-endpoint.example.com"}, + ) + assert client.project == "test-project" + assert client.database == "test-database" + assert client.namespace == "test-namespace" + assert client.host == "alternate-endpoint.example.com" + assert client.secure is True + + @staticmethod + def test_constructor_client_options_no_api_endpoint(): + with patch_credentials("testing") as creds: + client = client_module.Client( + project="test-project", + database="test-database", + namespace="test-namespace", + credentials=creds, + client_options={"scopes": ["my_scope"]}, + ) + assert client.project == "test-project" + assert client.database == "test-database" + assert client.namespace == "test-namespace" + assert client.host == _http.DATASTORE_API_HOST + assert client.secure is True + + @staticmethod + def test__determine_default(): + with patch_credentials("testing"): + client = client_module.Client() + assert client._determine_default("this") == "this" + + @staticmethod + def test__http(): + with patch_credentials("testing"): + client = client_module.Client() + with pytest.raises(NotImplementedError): + client._http + + @staticmethod + def test_context(): + with patch_credentials("testing"): + client = client_module.Client() + + with client.context(): + context = context_module.get_context() + assert context.client is client + + @staticmethod + def test_context_double_jeopardy(): + with patch_credentials("testing"): + client = client_module.Client() + + with client.context(): + with pytest.raises(RuntimeError): + client.context().__enter__() + + @staticmethod + def test_context_unfinished_business(): + """Regression test for #213. + + Make sure the eventloop is exhausted inside the context. + + https://github.com/googleapis/python-ndb/issues/213 + """ + with patch_credentials("testing"): + client = client_module.Client() + + def finish_up(): + context = context_module.get_context() + assert context.client is client + + with client.context(): + _eventloop.call_soon(finish_up) + + @staticmethod + def test_client_info(): + with patch_credentials("testing"): + client = client_module.Client() + agent = client.client_info.to_user_agent() + assert "google-cloud-ndb" in agent + version = agent.split("/")[1] + assert version[0].isdigit() + assert "." in version diff --git a/packages/google-cloud-ndb/tests/unit/test_concurrency.py b/packages/google-cloud-ndb/tests/unit/test_concurrency.py new file mode 100644 index 000000000000..0de03c49cb65 --- /dev/null +++ b/packages/google-cloud-ndb/tests/unit/test_concurrency.py @@ -0,0 +1,77 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
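+ +# The tests below exercise global-cache write locking under interleaved +# execution. They are skipped unless 'orchestrate' can be imported from +# test_utils, and the Redis/Memcache backends are only tried when +# REDIS_CACHE_URL or MEMCACHED_HOSTS is set in the environment.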
+ +import logging +import os + +import pytest + +from google.cloud.ndb import _cache +from google.cloud.ndb import global_cache as global_cache_module +from google.cloud.ndb import tasklets + +try: + from test_utils import orchestrate +except ImportError: # pragma: NO COVER + orchestrate = None + +log = logging.getLogger(__name__) + + +def cache_factories(): # pragma: NO COVER + yield global_cache_module._InProcessGlobalCache + + def redis_cache(): + return global_cache_module.RedisCache.from_environment() + + if os.environ.get("REDIS_CACHE_URL"): + yield redis_cache + + def memcache_cache(): + return global_cache_module.MemcacheCache.from_environment() + + if os.environ.get("MEMCACHED_HOSTS"): + yield memcache_cache + + +@pytest.mark.skipif( + orchestrate is None, reason="Cannot import 'orchestrate' from 'test_utils'" +) +@pytest.mark.parametrize("cache_factory", cache_factories()) +def test_global_cache_concurrent_write_692( + cache_factory, + context_factory, +): # pragma: NO COVER + """Regression test for #692 + + https://github.com/googleapis/python-ndb/issues/692 + """ + key = b"somekey" + + @tasklets.synctasklet + def lock_unlock_key(): # pragma: NO COVER + lock = yield _cache.global_lock_for_write(key) + cache_value = yield _cache.global_get(key) + assert lock in cache_value + + yield _cache.global_unlock_for_write(key, lock) + cache_value = yield _cache.global_get(key) + assert lock not in cache_value + + def run_test(): # pragma: NO COVER + global_cache = cache_factory() + with context_factory(global_cache=global_cache).use(): + lock_unlock_key() + + orchestrate.orchestrate(run_test, run_test, name="update key") diff --git a/packages/google-cloud-ndb/tests/unit/test_context.py b/packages/google-cloud-ndb/tests/unit/test_context.py new file mode 100644 index 000000000000..e65338e93610 --- /dev/null +++ b/packages/google-cloud-ndb/tests/unit/test_context.py @@ -0,0 +1,586 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
+ +import pytest +import threading + +from unittest import mock + +from google.cloud.ndb import context as context_module +from google.cloud.ndb import _eventloop +from google.cloud.ndb import exceptions +from google.cloud.ndb import key as key_module +from google.cloud.ndb import model +from google.cloud.ndb import _options + + +class Test_get_context: + @staticmethod + def test_in_context(in_context): + assert context_module.get_context() is in_context + + @staticmethod + def test_no_context_raise(): + with pytest.raises(exceptions.ContextError): + context_module.get_context() + + @staticmethod + def test_no_context_dont_raise(): + assert context_module.get_context(False) is None + + +class Test_get_toplevel_context: + @staticmethod + def test_in_context(in_context): + with in_context.new().use(): + assert context_module.get_toplevel_context() is in_context + + @staticmethod + def test_no_context_raise(): + with pytest.raises(exceptions.ContextError): + context_module.get_toplevel_context() + + @staticmethod + def test_no_context_dont_raise(): + assert context_module.get_toplevel_context(False) is None + + +class TestContext: + def _make_one(self, **kwargs): + client = mock.Mock( + namespace=None, + project="testing", + database="testdb", + spec=("namespace", "project", "database"), + stub=mock.Mock(spec=()), + ) + return context_module.Context(client, **kwargs) + + def test_constructor_defaults(self): + context = context_module.Context("client") + assert context.client == "client" + assert isinstance(context.eventloop, _eventloop.EventLoop) + assert context.batches == {} + assert context.transaction is None + + node1, pid1, sequence_no1 = context.id.split("-") + node2, pid2, sequence_no2 = context_module.Context("client").id.split("-") + assert node1 == node2 + assert pid1 == pid2 + assert int(sequence_no2) - int(sequence_no1) == 1 + + def test_constructor_concurrent_instantiation(self): + """Regression test for #716 + + This test non-deterministically exercises a potential concurrency issue. Before + the bug it covers was fixed, it failed most of the time.
+ + https://github.com/googleapis/python-ndb/issues/715 + """ + errors = [] + + def make_some(): + try: + for _ in range(10000): + context_module.Context("client") + except Exception as error: # pragma: NO COVER + errors.append(error) + + thread1 = threading.Thread(target=make_some) + thread2 = threading.Thread(target=make_some) + thread1.start() + thread2.start() + thread1.join() + thread2.join() + + assert not errors + + def test_constructor_overrides(self): + context = context_module.Context( + client="client", + eventloop="eventloop", + batches="batches", + transaction="transaction", + ) + assert context.client == "client" + assert context.eventloop == "eventloop" + assert context.batches == "batches" + assert context.transaction == "transaction" + + def test_new_transaction(self): + context = self._make_one() + new_context = context.new(transaction="tx123") + assert new_context.transaction == "tx123" + assert context.transaction is None + + def test_new_with_cache(self): + context = self._make_one() + context.cache["foo"] = "bar" + new_context = context.new() + assert context.cache is not new_context.cache + assert context.cache == new_context.cache + + def test_use(self): + context = self._make_one() + with context.use(): + assert context_module.get_context() is context + with pytest.raises(exceptions.ContextError): + context_module.get_context() + + def test_use_nested(self): + context = self._make_one() + with context.use(): + assert context_module.get_context() is context + next_context = context.new() + with next_context.use(): + assert context_module.get_context() is next_context + + assert context_module.get_context() is context + + with pytest.raises(exceptions.ContextError): + context_module.get_context() + + def test_clear_cache(self): + context = self._make_one() + context.cache["testkey"] = "testdata" + context.clear_cache() + assert not context.cache + + def test_flush(self): + eventloop = mock.Mock(spec=("run",)) + context = self._make_one(eventloop=eventloop) + context.flush() + eventloop.run.assert_called_once_with() + + def test_get_cache_policy(self): + context = self._make_one() + assert context.get_cache_policy() is context_module._default_cache_policy + + def test_get_datastore_policy(self): + context = self._make_one() + with pytest.raises(NotImplementedError): + context.get_datastore_policy() + + def test__use_datastore_default_policy(self): + class SomeKind(model.Model): + pass + + context = self._make_one() + with context.use(): + key = key_module.Key("SomeKind", 1) + options = _options.Options() + assert context._use_datastore(key, options) is True + + def test__use_datastore_from_options(self): + class SomeKind(model.Model): + pass + + context = self._make_one() + with context.use(): + key = key_module.Key("SomeKind", 1) + options = _options.Options(use_datastore=False) + assert context._use_datastore(key, options) is False + + def test_get_memcache_policy(self): + context = self._make_one() + assert ( + context.get_memcache_policy() is context_module._default_global_cache_policy + ) + + def test_get_global_cache_policy(self): + context = self._make_one() + assert ( + context.get_global_cache_policy() + is context_module._default_global_cache_policy + ) + + def test_get_memcache_timeout_policy(self): + context = self._make_one() + assert ( + context.get_memcache_timeout_policy() + is context_module._default_global_cache_timeout_policy + ) + + def test_get_global_cache_timeout_policy(self): +
context = self._make_one() + assert ( + context.get_global_cache_timeout_policy() + is context_module._default_global_cache_timeout_policy + ) + + def test_set_cache_policy(self): + policy = object() + context = self._make_one() + context.set_cache_policy(policy) + assert context.get_cache_policy() is policy + + def test_set_cache_policy_to_None(self): + context = self._make_one() + context.set_cache_policy(None) + assert context.get_cache_policy() is context_module._default_cache_policy + + def test_set_cache_policy_with_bool(self): + context = self._make_one() + context.set_cache_policy(False) + assert context.get_cache_policy()(None) is False + + def test__use_cache_default_policy(self): + class SomeKind(model.Model): + pass + + context = self._make_one() + with context.use(): + key = key_module.Key("SomeKind", 1) + options = _options.Options() + assert context._use_cache(key, options) is True + + def test__use_cache_from_options(self): + class SomeKind(model.Model): + pass + + context = self._make_one() + with context.use(): + key = "whocares" + options = _options.Options(use_cache=False) + assert context._use_cache(key, options) is False + + def test_set_datastore_policy(self): + context = self._make_one() + context.set_datastore_policy(None) + assert context.datastore_policy is context_module._default_datastore_policy + + def test_set_datastore_policy_as_bool(self): + context = self._make_one() + context.set_datastore_policy(False) + assert context.datastore_policy(None) is False + + def test_set_memcache_policy(self): + context = self._make_one() + context.set_memcache_policy(None) + assert ( + context.global_cache_policy is context_module._default_global_cache_policy + ) + + def test_set_global_cache_policy(self): + context = self._make_one() + context.set_global_cache_policy(None) + assert ( + context.global_cache_policy is context_module._default_global_cache_policy + ) + + def test_set_global_cache_policy_as_bool(self): + context = self._make_one() + context.set_global_cache_policy(True) + assert context.global_cache_policy("whatever") is True + + def test__use_global_cache_no_global_cache(self): + context = self._make_one() + assert context._use_global_cache("key") is False + + def test__use_global_cache_default_policy(self): + class SomeKind(model.Model): + pass + + context = self._make_one(global_cache="yes, there is one") + with context.use(): + key = key_module.Key("SomeKind", 1) + assert context._use_global_cache(key._key) is True + + def test__use_global_cache_from_options(self): + class SomeKind(model.Model): + pass + + context = self._make_one(global_cache="yes, there is one") + with context.use(): + key = "whocares" + options = _options.Options(use_global_cache=False) + assert context._use_global_cache(key, options=options) is False + + def test_set_memcache_timeout_policy(self): + context = self._make_one() + context.set_memcache_timeout_policy(None) + assert ( + context.global_cache_timeout_policy + is context_module._default_global_cache_timeout_policy + ) + + def test_set_global_cache_timeout_policy(self): + context = self._make_one() + context.set_global_cache_timeout_policy(None) + assert ( + context.global_cache_timeout_policy + is context_module._default_global_cache_timeout_policy + ) + + def test_set_global_cache_timeout_policy_as_int(self): + context = self._make_one() + context.set_global_cache_timeout_policy(14) + assert context.global_cache_timeout_policy("whatever") == 14 + + def test__global_cache_timeout_default_policy(self): + class SomeKind(model.Model): +
pass + + context = self._make_one() + with context.use(): + key = key_module.Key("SomeKind", 1) + timeout = context._global_cache_timeout(key._key, None) + assert timeout is None + + def test__global_cache_timeout_from_options(self): + class SomeKind(model.Model): + pass + + context = self._make_one() + with context.use(): + key = "whocares" + options = _options.Options(global_cache_timeout=49) + assert context._global_cache_timeout(key, options) == 49 + + def test_call_on_commit(self): + context = self._make_one() + callback = mock.Mock() + context.call_on_commit(callback) + callback.assert_called_once_with() + + def test_call_on_commit_with_transaction(self): + callbacks = [] + callback = "himom!" + context = self._make_one(transaction=b"tx123", on_commit_callbacks=callbacks) + context.call_on_commit(callback) + assert context.on_commit_callbacks == ["himom!"] + + def test_call_on_transaction_complete(self): + context = self._make_one() + callback = mock.Mock() + context.call_on_transaction_complete(callback) + callback.assert_called_once_with() + + def test_call_on_transaction_complete_with_transaction(self): + callbacks = [] + callback = "himom!" + context = self._make_one( + transaction=b"tx123", transaction_complete_callbacks=callbacks + ) + context.call_on_transaction_complete(callback) + assert context.transaction_complete_callbacks == ["himom!"] + + def test_in_transaction(self): + context = self._make_one() + assert context.in_transaction() is False + + def test_get_namespace_from_client(self): + context = self._make_one() + context.client.namespace = "hamburgers" + assert context.get_namespace() == "hamburgers" + + def test_get_namespace_from_context(self): + context = self._make_one(namespace="hotdogs") + context.client.namespace = "hamburgers" + assert context.get_namespace() == "hotdogs" + + def test_memcache_add(self): + context = self._make_one() + with pytest.raises(NotImplementedError): + context.memcache_add() + + def test_memcache_cas(self): + context = self._make_one() + with pytest.raises(NotImplementedError): + context.memcache_cas() + + def test_memcache_decr(self): + context = self._make_one() + with pytest.raises(NotImplementedError): + context.memcache_decr() + + def test_memcache_replace(self): + context = self._make_one() + with pytest.raises(NotImplementedError): + context.memcache_replace() + + def test_memcache_set(self): + context = self._make_one() + with pytest.raises(NotImplementedError): + context.memcache_set() + + def test_memcache_delete(self): + context = self._make_one() + with pytest.raises(NotImplementedError): + context.memcache_delete() + + def test_memcache_get(self): + context = self._make_one() + with pytest.raises(NotImplementedError): + context.memcache_get() + + def test_memcache_gets(self): + context = self._make_one() + with pytest.raises(NotImplementedError): + context.memcache_gets() + + def test_memcache_incr(self): + context = self._make_one() + with pytest.raises(NotImplementedError): + context.memcache_incr() + + def test_urlfetch(self): + context = self._make_one() + with pytest.raises(NotImplementedError): + context.urlfetch() + + +class TestAutoBatcher: + @staticmethod + def test_constructor(): + with pytest.raises(NotImplementedError): + context_module.AutoBatcher() + + +class TestContextOptions: + @staticmethod + def test_constructor(): + with pytest.raises(NotImplementedError): + context_module.ContextOptions() + + +class TestTransactionOptions: + @staticmethod + def test_constructor(): + assert 
len(context_module.TransactionOptions._PROPAGATION) == 4 + + +class Test_default_cache_policy: + @staticmethod + def test_key_is_None(): + assert context_module._default_cache_policy(None) is None + + @staticmethod + def test_no_model_class(): + key = mock.Mock(kind=mock.Mock(return_value="nokind"), spec=("kind",)) + assert context_module._default_cache_policy(key) is None + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_standard_model(): + class ThisKind(model.Model): + pass + + key = key_module.Key("ThisKind", 0) + assert context_module._default_cache_policy(key) is None + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_standard_model_defines_policy(): + flag = object() + + class ThisKind(model.Model): + @classmethod + def _use_cache(cls, key): + return flag + + key = key_module.Key("ThisKind", 0) + assert context_module._default_cache_policy(key) is flag + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_standard_model_defines_policy_as_bool(): + class ThisKind(model.Model): + _use_cache = False + + key = key_module.Key("ThisKind", 0) + assert context_module._default_cache_policy(key) is False + + +class Test_default_global_cache_policy: + @staticmethod + def test_key_is_None(): + assert context_module._default_global_cache_policy(None) is None + + @staticmethod + def test_no_model_class(): + key = mock.Mock(kind="nokind", spec=("kind",)) + assert context_module._default_global_cache_policy(key) is None + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_standard_model(): + class ThisKind(model.Model): + pass + + key = key_module.Key("ThisKind", 0) + assert context_module._default_global_cache_policy(key._key) is None + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_standard_model_defines_policy(): + flag = object() + + class ThisKind(model.Model): + @classmethod + def _use_global_cache(cls, key): + return flag + + key = key_module.Key("ThisKind", 0) + assert context_module._default_global_cache_policy(key._key) is flag + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_standard_model_defines_policy_as_bool(): + class ThisKind(model.Model): + _use_global_cache = False + + key = key_module.Key("ThisKind", 0) + assert context_module._default_global_cache_policy(key._key) is False + + +class Test_default_global_cache_timeout_policy: + @staticmethod + def test_key_is_None(): + assert context_module._default_global_cache_timeout_policy(None) is None + + @staticmethod + def test_no_model_class(): + key = mock.Mock(kind="nokind", spec=("kind",)) + assert context_module._default_global_cache_timeout_policy(key) is None + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_standard_model(): + class ThisKind(model.Model): + pass + + key = key_module.Key("ThisKind", 0) + assert context_module._default_global_cache_timeout_policy(key._key) is None + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_standard_model_defines_policy(): + class ThisKind(model.Model): + @classmethod + def _global_cache_timeout(cls, key): + return 13 + + key = key_module.Key("ThisKind", 0) + assert context_module._default_global_cache_timeout_policy(key._key) == 13 + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_standard_model_defines_policy_as_int(): + class ThisKind(model.Model): + _global_cache_timeout = 12 + + key = key_module.Key("ThisKind", 0) + assert context_module._default_global_cache_timeout_policy(key._key) == 12 diff --git 
a/packages/google-cloud-ndb/tests/unit/test_django_middleware.py b/packages/google-cloud-ndb/tests/unit/test_django_middleware.py new file mode 100644 index 000000000000..3023bb0556c2 --- /dev/null +++ b/packages/google-cloud-ndb/tests/unit/test_django_middleware.py @@ -0,0 +1,30 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest + +from google.cloud.ndb import django_middleware + +from . import utils + + +def test___all__(): + utils.verify___all__(django_middleware) + + +class TestNdbDjangoMiddleware: + @staticmethod + def test_constructor(): + with pytest.raises(NotImplementedError): + django_middleware.NdbDjangoMiddleware() diff --git a/packages/google-cloud-ndb/tests/unit/test_global_cache.py b/packages/google-cloud-ndb/tests/unit/test_global_cache.py new file mode 100644 index 000000000000..c7c73962c58b --- /dev/null +++ b/packages/google-cloud-ndb/tests/unit/test_global_cache.py @@ -0,0 +1,728 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
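+ +# Illustrative configuration sketch, assuming the Client.context API exercised +# in test_client.py (not part of this file's tests): +# +# cache = global_cache.RedisCache.from_environment() # reads REDIS_CACHE_URL +# with client.context(global_cache=cache): +# ... # entity reads/writes now go through the global cache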
+ +import collections + +from unittest import mock + +import pytest +import redis as redis_module + +from google.cloud.ndb import global_cache + + +class TestGlobalCache: + def make_one(self): + class MockImpl(global_cache.GlobalCache): + def get(self, keys): + return super(MockImpl, self).get(keys) + + def set(self, items, expires=None): + return super(MockImpl, self).set(items, expires=expires) + + def set_if_not_exists(self, items, expires=None): + return super(MockImpl, self).set_if_not_exists(items, expires=expires) + + def delete(self, keys): + return super(MockImpl, self).delete(keys) + + def watch(self, keys): + return super(MockImpl, self).watch(keys) + + def unwatch(self, keys): + return super(MockImpl, self).unwatch(keys) + + def compare_and_swap(self, items, expires=None): + return super(MockImpl, self).compare_and_swap(items, expires=expires) + + def clear(self): + return super(MockImpl, self).clear() + + return MockImpl() + + def test_get(self): + cache = self.make_one() + with pytest.raises(NotImplementedError): + cache.get(b"foo") + + def test_set(self): + cache = self.make_one() + with pytest.raises(NotImplementedError): + cache.set({b"foo": "bar"}) + + def test_set_if_not_exists(self): + cache = self.make_one() + with pytest.raises(NotImplementedError): + cache.set_if_not_exists({b"foo": "bar"}) + + def test_delete(self): + cache = self.make_one() + with pytest.raises(NotImplementedError): + cache.delete(b"foo") + + def test_watch(self): + cache = self.make_one() + with pytest.raises(NotImplementedError): + cache.watch(b"foo") + + def test_unwatch(self): + cache = self.make_one() + with pytest.raises(NotImplementedError): + cache.unwatch(b"foo") + + def test_compare_and_swap(self): + cache = self.make_one() + with pytest.raises(NotImplementedError): + cache.compare_and_swap({b"foo": "bar"}) + + def test_clear(self): + cache = self.make_one() + with pytest.raises(NotImplementedError): + cache.clear() + + +class TestInProcessGlobalCache: + @staticmethod + def test_set_get_delete(): + cache = global_cache._InProcessGlobalCache() + result = cache.set({b"one": b"foo", b"two": b"bar", b"three": b"baz"}) + assert result is None + + result = cache.get([b"two", b"three", b"one"]) + assert result == [b"bar", b"baz", b"foo"] + + cache = global_cache._InProcessGlobalCache() + result = cache.get([b"two", b"three", b"one"]) + assert result == [b"bar", b"baz", b"foo"] + + result = cache.delete([b"one", b"two", b"three"]) + assert result is None + + result = cache.get([b"two", b"three", b"one"]) + assert result == [None, None, None] + + @staticmethod + @mock.patch("google.cloud.ndb.global_cache.time") + def test_set_get_delete_w_expires(time): + time.time.return_value = 0 + + cache = global_cache._InProcessGlobalCache() + result = cache.set( + {b"one": b"foo", b"two": b"bar", b"three": b"baz"}, expires=5 + ) + assert result is None + + result = cache.get([b"two", b"three", b"one"]) + assert result == [b"bar", b"baz", b"foo"] + + time.time.return_value = 10 + result = cache.get([b"two", b"three", b"one"]) + assert result == [None, None, None] + + @staticmethod + def test_set_if_not_exists(): + cache = global_cache._InProcessGlobalCache() + result = cache.set_if_not_exists({b"one": b"foo", b"two": b"bar"}) + assert result == {b"one": True, b"two": True} + + result = cache.set_if_not_exists({b"two": b"bar", b"three": b"baz"}) + assert result == {b"two": False, b"three": True} + + result = cache.get([b"two", b"three", b"one"]) + assert result == [b"bar", b"baz", b"foo"] + + @staticmethod + 
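+    # The expiry tests in this class mock google.cloud.ndb.global_cache.time
+    # so that time.time() is fully controlled: entries are stored as
+    # (value, expiry) tuples (see the direct cache.cache[...] assignments
+    # below), so advancing the mocked clock past `expires` makes get()
+    # return None with no real waiting.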
@mock.patch("google.cloud.ndb.global_cache.time") + def test_set_if_not_exists_w_expires(time): + time.time.return_value = 0 + + cache = global_cache._InProcessGlobalCache() + result = cache.set_if_not_exists({b"one": b"foo", b"two": b"bar"}, expires=5) + assert result == {b"one": True, b"two": True} + + result = cache.set_if_not_exists({b"two": b"bar", b"three": b"baz"}, expires=5) + assert result == {b"two": False, b"three": True} + + result = cache.get([b"two", b"three", b"one"]) + assert result == [b"bar", b"baz", b"foo"] + + time.time.return_value = 10 + result = cache.get([b"two", b"three", b"one"]) + assert result == [None, None, None] + + @staticmethod + def test_watch_compare_and_swap(): + cache = global_cache._InProcessGlobalCache() + cache.cache[b"one"] = (b"food", None) + cache.cache[b"two"] = (b"bard", None) + cache.cache[b"three"] = (b"bazz", None) + result = cache.watch({b"one": b"food", b"two": b"bard", b"three": b"bazd"}) + assert result is None + + cache.cache[b"two"] = (b"hamburgers", None) + + result = cache.compare_and_swap( + {b"one": b"foo", b"two": b"bar", b"three": b"baz"} + ) + assert result == {b"one": True, b"two": False, b"three": False} + + result = cache.get([b"one", b"two", b"three"]) + assert result == [b"foo", b"hamburgers", b"bazz"] + + @staticmethod + @mock.patch("google.cloud.ndb.global_cache.time") + def test_watch_compare_and_swap_with_expires(time): + time.time.return_value = 0 + + cache = global_cache._InProcessGlobalCache() + cache.cache[b"one"] = (b"food", None) + cache.cache[b"two"] = (b"bard", None) + cache.cache[b"three"] = (b"bazz", None) + result = cache.watch({b"one": b"food", b"two": b"bard", b"three": b"bazd"}) + assert result is None + + cache.cache[b"two"] = (b"hamburgers", None) + + result = cache.compare_and_swap( + {b"one": b"foo", b"two": b"bar", b"three": b"baz"}, expires=5 + ) + assert result == {b"one": True, b"two": False, b"three": False} + + result = cache.get([b"one", b"two", b"three"]) + assert result == [b"foo", b"hamburgers", b"bazz"] + + time.time.return_value = 10 + + result = cache.get([b"one", b"two", b"three"]) + assert result == [None, b"hamburgers", b"bazz"] + + @staticmethod + def test_watch_unwatch(): + cache = global_cache._InProcessGlobalCache() + result = cache.watch({b"one": "foo", b"two": "bar", b"three": "baz"}) + assert result is None + + result = cache.unwatch([b"one", b"two", b"three"]) + assert result is None + assert cache._watch_keys == {} + + @staticmethod + def test_clear(): + cache = global_cache._InProcessGlobalCache() + cache.cache["foo"] = "bar" + cache.clear() + assert cache.cache == {} + + +class TestRedisCache: + @staticmethod + def test_constructor(): + redis = object() + cache = global_cache.RedisCache(redis) + assert cache.redis is redis + + @staticmethod + @mock.patch("google.cloud.ndb.global_cache.redis_module") + def test_from_environment(redis_module): + redis = redis_module.Redis.from_url.return_value + with mock.patch.dict("os.environ", {"REDIS_CACHE_URL": "some://url"}): + cache = global_cache.RedisCache.from_environment() + assert cache.redis is redis + redis_module.Redis.from_url.assert_called_once_with("some://url") + + @staticmethod + def test_from_environment_not_configured(): + with mock.patch.dict("os.environ", {"REDIS_CACHE_URL": ""}): + cache = global_cache.RedisCache.from_environment() + assert cache is None + + @staticmethod + def test_get(): + redis = mock.Mock(spec=("mget",)) + cache_keys = [object(), object()] + cache_value = redis.mget.return_value + cache = 
global_cache.RedisCache(redis) + assert cache.get(cache_keys) is cache_value + redis.mget.assert_called_once_with(cache_keys) + + @staticmethod + def test_set(): + redis = mock.Mock(spec=("mset",)) + cache_items = {"a": "foo", "b": "bar"} + cache = global_cache.RedisCache(redis) + cache.set(cache_items) + redis.mset.assert_called_once_with(cache_items) + + @staticmethod + def test_set_w_expires(): + expired = {} + + def mock_expire(key, expires): + expired[key] = expires + + redis = mock.Mock(expire=mock_expire, spec=("mset", "expire")) + cache_items = {"a": "foo", "b": "bar"} + cache = global_cache.RedisCache(redis) + cache.set(cache_items, expires=32) + redis.mset.assert_called_once_with(cache_items) + assert expired == {"a": 32, "b": 32} + + @staticmethod + def test_set_if_not_exists(): + redis = mock.Mock(spec=("setnx",)) + redis.setnx.side_effect = (True, False) + cache_items = collections.OrderedDict([("a", "foo"), ("b", "bar")]) + cache = global_cache.RedisCache(redis) + results = cache.set_if_not_exists(cache_items) + assert results == {"a": True, "b": False} + redis.setnx.assert_has_calls( + [ + mock.call("a", "foo"), + mock.call("b", "bar"), + ] + ) + + @staticmethod + def test_set_if_not_exists_w_expires(): + redis = mock.Mock(spec=("setnx", "expire")) + redis.setnx.side_effect = (True, False) + cache_items = collections.OrderedDict([("a", "foo"), ("b", "bar")]) + cache = global_cache.RedisCache(redis) + results = cache.set_if_not_exists(cache_items, expires=123) + assert results == {"a": True, "b": False} + redis.setnx.assert_has_calls( + [ + mock.call("a", "foo"), + mock.call("b", "bar"), + ] + ) + redis.expire.assert_called_once_with("a", 123) + + @staticmethod + def test_delete(): + redis = mock.Mock(spec=("delete",)) + cache_keys = [object(), object()] + cache = global_cache.RedisCache(redis) + cache.delete(cache_keys) + redis.delete.assert_called_once_with(*cache_keys) + + @staticmethod + def test_watch(): + def mock_redis_get(key): + if key == "foo": + return "moo" + + return "nope" + + redis = mock.Mock( + pipeline=mock.Mock(spec=("watch", "get", "reset")), spec=("pipeline",) + ) + pipe = redis.pipeline.return_value + pipe.get.side_effect = mock_redis_get + items = {"foo": "moo", "bar": "car"} + cache = global_cache.RedisCache(redis) + cache.watch(items) + + pipe.watch.assert_has_calls( + [ + mock.call("foo"), + mock.call("bar"), + ], + any_order=True, + ) + + pipe.get.assert_has_calls( + [ + mock.call("foo"), + mock.call("bar"), + ], + any_order=True, + ) + + assert cache.pipes == {"foo": pipe} + + @staticmethod + def test_unwatch(): + redis = mock.Mock(spec=()) + cache = global_cache.RedisCache(redis) + pipe = mock.Mock(spec=("reset",)) + cache._pipes.pipes = { + "ay": pipe, + "be": pipe, + "see": pipe, + "dee": pipe, + "whatevs": "himom!", + } + + cache.unwatch(["ay", "be", "see", "dee", "nuffin"]) + assert cache.pipes == {"whatevs": "himom!"} + pipe.reset.assert_has_calls([mock.call()] * 4) + + @staticmethod + def test_compare_and_swap(): + redis = mock.Mock(spec=()) + cache = global_cache.RedisCache(redis) + pipe1 = mock.Mock(spec=("multi", "set", "execute", "reset")) + pipe2 = mock.Mock(spec=("multi", "set", "execute", "reset")) + pipe2.execute.side_effect = redis_module.exceptions.WatchError + cache._pipes.pipes = { + "foo": pipe1, + "bar": pipe2, + } + + result = cache.compare_and_swap( + { + "foo": "moo", + "bar": "car", + "baz": "maz", + } + ) + assert result == {"foo": True, "bar": False, "baz": False} + + pipe1.multi.assert_called_once_with() + 
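+        # Redis compare-and-swap rides on WATCH/MULTI/EXEC: each watched key
+        # keeps its own pipeline, a successful execute() reports True, and a
+        # WatchError (pipe2 in this test) reports False for that key.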
pipe1.set.assert_called_once_with("foo", "moo") + pipe1.execute.assert_called_once_with() + pipe1.reset.assert_called_once_with() + + pipe2.multi.assert_called_once_with() + pipe2.set.assert_called_once_with("bar", "car") + pipe2.execute.assert_called_once_with() + pipe2.reset.assert_called_once_with() + + @staticmethod + def test_compare_and_swap_w_expires(): + redis = mock.Mock(spec=()) + cache = global_cache.RedisCache(redis) + pipe1 = mock.Mock(spec=("multi", "setex", "execute", "reset")) + pipe2 = mock.Mock(spec=("multi", "setex", "execute", "reset")) + pipe2.execute.side_effect = redis_module.exceptions.WatchError + cache._pipes.pipes = { + "foo": pipe1, + "bar": pipe2, + } + + result = cache.compare_and_swap( + { + "foo": "moo", + "bar": "car", + "baz": "maz", + }, + expires=5, + ) + assert result == {"foo": True, "bar": False, "baz": False} + + pipe1.multi.assert_called_once_with() + pipe1.setex.assert_called_once_with("foo", 5, "moo") + pipe1.execute.assert_called_once_with() + pipe1.reset.assert_called_once_with() + + pipe2.multi.assert_called_once_with() + pipe2.setex.assert_called_once_with("bar", 5, "car") + pipe2.execute.assert_called_once_with() + pipe2.reset.assert_called_once_with() + + @staticmethod + def test_clear(): + redis = mock.Mock(spec=("flushdb",)) + cache = global_cache.RedisCache(redis) + cache.clear() + redis.flushdb.assert_called_once_with() + + +class TestMemcacheCache: + @staticmethod + def test__key_long_key(): + key = b"ou812" * 100 + encoded = global_cache.MemcacheCache._key(key) + assert len(encoded) == 40 # hex-encoded sha1 digests are 40 bytes + + @staticmethod + @mock.patch("google.cloud.ndb.global_cache.pymemcache") + def test_from_environment_not_configured(pymemcache): + with mock.patch.dict("os.environ", {"MEMCACHED_HOSTS": ""}): + assert global_cache.MemcacheCache.from_environment() is None + + @staticmethod + @mock.patch("google.cloud.ndb.global_cache.pymemcache") + def test_from_environment_one_host_no_port(pymemcache): + with mock.patch.dict("os.environ", {"MEMCACHED_HOSTS": "somehost"}): + cache = global_cache.MemcacheCache.from_environment() + assert cache.client is pymemcache.PooledClient.return_value + pymemcache.PooledClient.assert_called_once_with( + ("somehost", 11211), max_pool_size=4 + ) + + @staticmethod + @mock.patch("google.cloud.ndb.global_cache.pymemcache") + def test_from_environment_one_host_with_port(pymemcache): + with mock.patch.dict("os.environ", {"MEMCACHED_HOSTS": "somehost:22422"}): + cache = global_cache.MemcacheCache.from_environment() + assert cache.client is pymemcache.PooledClient.return_value + pymemcache.PooledClient.assert_called_once_with( + ("somehost", 22422), max_pool_size=4 + ) + + @staticmethod + @mock.patch("google.cloud.ndb.global_cache.pymemcache") + def test_from_environment_two_hosts_with_port(pymemcache): + with mock.patch.dict( + "os.environ", {"MEMCACHED_HOSTS": "somehost:22422 otherhost:33633"} + ): + cache = global_cache.MemcacheCache.from_environment() + assert cache.client is pymemcache.HashClient.return_value + pymemcache.HashClient.assert_called_once_with( + [("somehost", 22422), ("otherhost", 33633)], + use_pooling=True, + max_pool_size=4, + ) + + @staticmethod + @mock.patch("google.cloud.ndb.global_cache.pymemcache") + def test_from_environment_two_hosts_no_port(pymemcache): + with mock.patch.dict("os.environ", {"MEMCACHED_HOSTS": "somehost otherhost"}): + cache = global_cache.MemcacheCache.from_environment() + assert cache.client is pymemcache.HashClient.return_value + 
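+            # MEMCACHED_HOSTS is parsed as space-separated "host" or
+            # "host:port" entries (port defaults to 11211); a single host
+            # yields a PooledClient, several hosts a HashClient, as the
+            # surrounding cases assert.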
pymemcache.HashClient.assert_called_once_with( + [("somehost", 11211), ("otherhost", 11211)], + use_pooling=True, + max_pool_size=4, + ) + + @staticmethod + @mock.patch("google.cloud.ndb.global_cache.pymemcache") + def test_from_environment_one_host_no_port_pool_size_zero(pymemcache): + with mock.patch.dict("os.environ", {"MEMCACHED_HOSTS": "somehost"}): + cache = global_cache.MemcacheCache.from_environment(max_pool_size=0) + assert cache.client is pymemcache.PooledClient.return_value + pymemcache.PooledClient.assert_called_once_with( + ("somehost", 11211), max_pool_size=1 + ) + + @staticmethod + @mock.patch("google.cloud.ndb.global_cache.pymemcache") + def test_from_environment_bad_host_extra_colon(pymemcache): + with mock.patch.dict("os.environ", {"MEMCACHED_HOSTS": "somehost:say:what?"}): + with pytest.raises(ValueError): + global_cache.MemcacheCache.from_environment() + + @staticmethod + @mock.patch("google.cloud.ndb.global_cache.pymemcache") + def test_from_environment_bad_host_port_not_an_integer(pymemcache): + with mock.patch.dict("os.environ", {"MEMCACHED_HOSTS": "somehost:saywhat?"}): + with pytest.raises(ValueError): + global_cache.MemcacheCache.from_environment() + + @staticmethod + def test_get(): + client = mock.Mock(spec=("get_many",)) + cache = global_cache.MemcacheCache(client) + key1 = cache._key(b"one") + key2 = cache._key(b"two") + client.get_many.return_value = {key1: "bun", key2: "shoe"} + assert cache.get((b"one", b"two")) == ["bun", "shoe"] + client.get_many.assert_called_once_with([key1, key2]) + + @staticmethod + def test_set(): + client = mock.Mock(spec=("set_many",)) + client.set_many.return_value = [] + cache = global_cache.MemcacheCache(client) + key1 = cache._key(b"one") + key2 = cache._key(b"two") + cache.set( + { + b"one": "bun", + b"two": "shoe", + } + ) + client.set_many.assert_called_once_with( + { + key1: "bun", + key2: "shoe", + }, + expire=0, + noreply=False, + ) + + @staticmethod + def test_set_if_not_exists(): + client = mock.Mock(spec=("add",)) + client.add.side_effect = (True, False) + cache_items = collections.OrderedDict([(b"a", b"foo"), (b"b", b"bar")]) + cache = global_cache.MemcacheCache(client) + results = cache.set_if_not_exists(cache_items) + assert results == {b"a": True, b"b": False} + client.add.assert_has_calls( + [ + mock.call(cache._key(b"a"), b"foo", expire=0, noreply=False), + mock.call(cache._key(b"b"), b"bar", expire=0, noreply=False), + ] + ) + + @staticmethod + def test_set_if_not_exists_w_expires(): + client = mock.Mock(spec=("add",)) + client.add.side_effect = (True, False) + cache_items = collections.OrderedDict([(b"a", b"foo"), (b"b", b"bar")]) + cache = global_cache.MemcacheCache(client) + results = cache.set_if_not_exists(cache_items, expires=123) + assert results == {b"a": True, b"b": False} + client.add.assert_has_calls( + [ + mock.call(cache._key(b"a"), b"foo", expire=123, noreply=False), + mock.call(cache._key(b"b"), b"bar", expire=123, noreply=False), + ] + ) + + @staticmethod + def test_set_w_expires(): + client = mock.Mock(spec=("set_many",)) + client.set_many.return_value = [] + cache = global_cache.MemcacheCache(client) + key1 = cache._key(b"one") + key2 = cache._key(b"two") + cache.set( + { + b"one": "bun", + b"two": "shoe", + }, + expires=5, + ) + client.set_many.assert_called_once_with( + { + key1: "bun", + key2: "shoe", + }, + expire=5, + noreply=False, + ) + + @staticmethod + def test_set_failed_key(): + client = mock.Mock(spec=("set_many",)) + cache = global_cache.MemcacheCache(client) + key1 = 
cache._key(b"one") + key2 = cache._key(b"two") + client.set_many.return_value = [key2] + + unset = cache.set( + { + b"one": "bun", + b"two": "shoe", + } + ) + assert unset == {b"two": global_cache.MemcacheCache.KeyNotSet(b"two")} + + client.set_many.assert_called_once_with( + { + key1: "bun", + key2: "shoe", + }, + expire=0, + noreply=False, + ) + + @staticmethod + def test_KeyNotSet(): + unset = global_cache.MemcacheCache.KeyNotSet(b"foo") + assert unset == global_cache.MemcacheCache.KeyNotSet(b"foo") + assert not unset == global_cache.MemcacheCache.KeyNotSet(b"goo") + assert not unset == "hamburger" + + @staticmethod + def test_delete(): + client = mock.Mock(spec=("delete_many",)) + cache = global_cache.MemcacheCache(client) + key1 = cache._key(b"one") + key2 = cache._key(b"two") + cache.delete((b"one", b"two")) + client.delete_many.assert_called_once_with([key1, key2]) + + @staticmethod + def test_watch(): + client = mock.Mock(spec=("gets_many",)) + cache = global_cache.MemcacheCache(client) + key1 = cache._key(b"one") + key2 = cache._key(b"two") + client.gets_many.return_value = { + key1: ("bun", b"0"), + key2: ("shoe", b"1"), + } + cache.watch( + collections.OrderedDict( + ( + (b"one", "bun"), + (b"two", "shot"), + ) + ) + ) + client.gets_many.assert_called_once_with([key1, key2]) + assert cache.caskeys == { + key1: b"0", + } + + @staticmethod + def test_unwatch(): + client = mock.Mock(spec=()) + cache = global_cache.MemcacheCache(client) + key2 = cache._key(b"two") + cache.caskeys[key2] = b"5" + cache.caskeys["whatevs"] = b"6" + cache.unwatch([b"one", b"two"]) + + assert cache.caskeys == {"whatevs": b"6"} + + @staticmethod + def test_compare_and_swap(): + client = mock.Mock(spec=("cas",)) + cache = global_cache.MemcacheCache(client) + key2 = cache._key(b"two") + cache.caskeys[key2] = b"5" + cache.caskeys["whatevs"] = b"6" + result = cache.compare_and_swap( + { + b"one": "bun", + b"two": "shoe", + } + ) + + assert result == {b"two": True} + client.cas.assert_called_once_with(key2, "shoe", b"5", expire=0, noreply=False) + assert cache.caskeys == {"whatevs": b"6"} + + @staticmethod + def test_compare_and_swap_and_expires(): + client = mock.Mock(spec=("cas",)) + cache = global_cache.MemcacheCache(client) + key2 = cache._key(b"two") + cache.caskeys[key2] = b"5" + cache.caskeys["whatevs"] = b"6" + result = cache.compare_and_swap( + { + b"one": "bun", + b"two": "shoe", + }, + expires=5, + ) + + assert result == {b"two": True} + client.cas.assert_called_once_with(key2, "shoe", b"5", expire=5, noreply=False) + assert cache.caskeys == {"whatevs": b"6"} + + @staticmethod + def test_clear(): + client = mock.Mock(spec=("flush_all",)) + cache = global_cache.MemcacheCache(client) + cache.clear() + client.flush_all.assert_called_once_with() diff --git a/packages/google-cloud-ndb/tests/unit/test_key.py b/packages/google-cloud-ndb/tests/unit/test_key.py new file mode 100644 index 000000000000..58dbed48af8f --- /dev/null +++ b/packages/google-cloud-ndb/tests/unit/test_key.py @@ -0,0 +1,1178 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import base64 +import pickle + +from unittest import mock + +from google.cloud.datastore import _app_engine_key_pb2 +import google.cloud.datastore +import pytest + +from google.cloud.ndb import exceptions +from google.cloud.ndb import key as key_module +from google.cloud.ndb import model +from google.cloud.ndb import _options +from google.cloud.ndb import tasklets + +from . import utils + + +def test___all__(): + utils.verify___all__(key_module) + + +class TestKey: + URLSAFE = b"agZzfmZpcmVyDwsSBEtpbmQiBVRoaW5nDA" + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_default(): + key = key_module.Key("Kind", 42) + + assert key._key == google.cloud.datastore.Key("Kind", 42, project="testing") + assert key._reference is None + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_with_unicode(): + """Regression test for #322. + + https://github.com/googleapis/python-ndb/issues/322 + """ + key = key_module.Key("Kind", 42) + + assert key._key == google.cloud.datastore.Key("Kind", 42, project="testing") + assert key._reference is None + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_with_different_database(context): + context.client.database = "DiffDatabase" + key = key_module.Key("Kind", 42) + + assert key._key == google.cloud.datastore.Key( + "Kind", 42, project="testing", database="DiffDatabase" + ) + assert key._reference is None + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_with_different_namespace(context): + context.client.namespace = "DiffNamespace" + key = key_module.Key("Kind", 42) + + assert key._key == google.cloud.datastore.Key( + "Kind", 42, project="testing", namespace="DiffNamespace" + ) + assert key._reference is None + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_empty_path(): + with pytest.raises(TypeError): + key_module.Key(pairs=()) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_partial(): + with pytest.raises(ValueError): + key_module.Key("Kind") + + key = key_module.Key("Kind", None) + + assert key._key.is_partial + assert key._key.flat_path == ("Kind",) + assert key._key.project == "testing" + assert key._reference is None + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_invalid_id_type(): + with pytest.raises(TypeError): + key_module.Key("Kind", object()) + with pytest.raises(exceptions.BadArgumentError): + key_module.Key("Kind", None, "Also", 10) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_invalid_kind_type(): + with pytest.raises(TypeError): + key_module.Key(object(), 47) + with pytest.raises(AttributeError): + key_module.Key(object, 47) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_kind_as_model(): + class Simple(model.Model): + pass + + key = key_module.Key(Simple, 47) + assert key._key == google.cloud.datastore.Key("Simple", 47, project="testing") + assert key._reference is None + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_with_reference(): + reference = make_reference() + key = key_module.Key(reference=reference) + + assert key._key == google.cloud.datastore.Key( + "Parent", + 59, + "Child", + "Feather", + project="sample-app", + database="base", + namespace="space", + ) + assert key._reference is reference + + @staticmethod + 
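+    # `serialized` takes the raw _app_engine_key_pb2.Reference bytes, while
+    # `urlsafe` (tested further down) is the same payload base64url-encoded
+    # -- test_colliding_reference_arguments below derives one from the
+    # other. A hedged sketch of that relationship:
+    #
+    #     padded = urlsafe + b"=" * (-len(urlsafe) % 4)
+    #     key_module.Key(serialized=base64.urlsafe_b64decode(padded))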
@pytest.mark.usefixtures("in_context") + def test_constructor_with_serialized(): + serialized = b"j\x18s~sample-app-no-locationr\n\x0b\x12\x04Zorp\x18X\x0c" + key = key_module.Key(serialized=serialized) + + assert key._key == google.cloud.datastore.Key( + "Zorp", 88, project="sample-app-no-location" + ) + assert key._reference == make_reference( + path=({"type": "Zorp", "id": 88},), + app="s~sample-app-no-location", + database=None, + namespace=None, + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_with_serialized_with_database(): + serialized = b"j\x18s~sample-app-no-locationr\n\x0b\x12\x04Zorp\x18X\x0c\xba\x01\tsample-db" + key = key_module.Key(serialized=serialized) + + assert key._key == google.cloud.datastore.Key( + "Zorp", 88, project="sample-app-no-location", database="sample-db" + ) + assert key._reference == make_reference( + path=({"type": "Zorp", "id": 88},), + app="s~sample-app-no-location", + database="sample-db", + namespace=None, + ) + + @pytest.mark.usefixtures("in_context") + def test_constructor_with_urlsafe(self): + key = key_module.Key(urlsafe=self.URLSAFE) + + assert key._key == google.cloud.datastore.Key("Kind", "Thing", project="fire") + assert key._reference == make_reference( + path=({"type": "Kind", "name": "Thing"},), + app="s~fire", + database=None, + namespace=None, + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_with_pairs(): + key = key_module.Key(pairs=[("Kind", 1)]) + + assert key._key == google.cloud.datastore.Key("Kind", 1, project="testing") + assert key._reference is None + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_with_flat(): + key = key_module.Key(flat=["Kind", 1]) + + assert key._key == google.cloud.datastore.Key("Kind", 1, project="testing") + assert key._reference is None + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_with_flat_and_pairs(): + with pytest.raises(TypeError): + key_module.Key(pairs=[("Kind", 1)], flat=["Kind", 1]) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_with_app(): + key = key_module.Key("Kind", 10, app="s~foo") + + assert key._key == google.cloud.datastore.Key("Kind", 10, project="foo") + assert key._reference is None + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_with_project(): + key = key_module.Key("Kind", 10, project="foo") + + assert key._key == google.cloud.datastore.Key("Kind", 10, project="foo") + assert key._reference is None + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_with_project_and_app(): + with pytest.raises(TypeError): + key_module.Key("Kind", 10, project="foo", app="bar") + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_with_default_database_as_empty_string(): + key = key_module.Key("Kind", 1337, database="") + + assert key._key == google.cloud.datastore.Key("Kind", 1337, project="testing") + assert key.database() is None + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_with_database(): + key = key_module.Key("Kind", 1337, database="foo") + + assert key._key == google.cloud.datastore.Key( + "Kind", 1337, project="testing", database="foo" + ) + assert key.database() == "foo" + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_with_namespace(): + key = key_module.Key("Kind", 1337, namespace="foo") + + assert key._key == google.cloud.datastore.Key( + "Kind", 1337, 
project="testing", namespace="foo" + ) + assert key.namespace() == "foo" + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_with_default_namespace_as_empty_string(context): + context.client.namespace = "DiffNamespace" + key = key_module.Key("Kind", 1337, namespace="") + + assert key._key == google.cloud.datastore.Key("Kind", 1337, project="testing") + assert key.namespace() is None + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_with_default_namespace_as_None(context): + context.client.namespace = "DiffNamespace" + key = key_module.Key("Kind", 1337, namespace=None) + + assert key._key == google.cloud.datastore.Key("Kind", 1337, project="testing") + assert key.namespace() is None + + @pytest.mark.usefixtures("in_context") + def test_constructor_with_parent(self): + parent = key_module.Key(urlsafe=self.URLSAFE) + key = key_module.Key("Zip", 10, parent=parent) + + assert key._key == google.cloud.datastore.Key( + "Kind", "Thing", "Zip", 10, project="fire" + ) + assert key._reference is None + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_with_parent_and_database(): + parent = key_module.Key("Kind", "Thing", project="fire", database="foo") + key = key_module.Key("Zip", 10, parent=parent, database="foo") + + assert key._key == google.cloud.datastore.Key( + "Kind", "Thing", "Zip", 10, project="fire", database="foo" + ) + assert key._reference is None + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_with_parent_and_database_undefined(): + parent = key_module.Key("Kind", "Thing", project="fire", database="foo") + key = key_module.Key("Zip", 10, parent=parent) + + assert key._key == google.cloud.datastore.Key( + "Kind", "Thing", "Zip", 10, project="fire", database="foo" + ) + assert key._reference is None + + @pytest.mark.usefixtures("in_context") + def test_constructor_with_parent_and_namespace(self): + parent = key_module.Key(urlsafe=self.URLSAFE) + key = key_module.Key("Zip", 10, parent=parent, namespace=None) + + assert key._key == google.cloud.datastore.Key( + "Kind", "Thing", "Zip", 10, project="fire" + ) + assert key._reference is None + + @pytest.mark.usefixtures("in_context") + def test_constructor_with_parent_and_mismatched_namespace(self): + parent = key_module.Key(urlsafe=self.URLSAFE) + with pytest.raises(ValueError): + key_module.Key("Zip", 10, parent=parent, namespace="foo") + + @pytest.mark.usefixtures("in_context") + def test_constructor_with_parent_bad_type(self): + parent = mock.sentinel.parent + with pytest.raises(exceptions.BadValueError): + key_module.Key("Zip", 10, parent=parent) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_insufficient_args(): + with pytest.raises(TypeError): + key_module.Key(app="foo") + + @pytest.mark.usefixtures("in_context") + def test_no_subclass_for_reference(self): + class KeySubclass(key_module.Key): + pass + + with pytest.raises(TypeError): + KeySubclass(urlsafe=self.URLSAFE) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_invalid_argument_combination(): + with pytest.raises(TypeError): + key_module.Key(flat=["a", "b"], urlsafe=b"foo") + + @pytest.mark.usefixtures("in_context") + def test_colliding_reference_arguments(self): + urlsafe = self.URLSAFE + padding = b"=" * (-len(urlsafe) % 4) + serialized = base64.urlsafe_b64decode(urlsafe + padding) + + with pytest.raises(TypeError): + key_module.Key(urlsafe=urlsafe, serialized=serialized) + + @staticmethod + 
@mock.patch("google.cloud.ndb.key.Key.__init__") + def test__from_ds_key(key_init): + ds_key = google.cloud.datastore.Key("a", "b", project="c") + key = key_module.Key._from_ds_key(ds_key) + assert key._key is ds_key + assert key._reference is None + + key_init.assert_not_called() + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test___repr__defaults(): + key = key_module.Key("a", "b") + assert repr(key) == "Key('a', 'b')" + assert str(key) == "Key('a', 'b')" + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test___repr__non_defaults(): + key = key_module.Key("X", 11, app="foo", namespace="bar", database="baz") + assert ( + repr(key) == "Key('X', 11, project='foo', database='baz', namespace='bar')" + ) + assert ( + str(key) == "Key('X', 11, project='foo', database='baz', namespace='bar')" + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test___hash__(): + key1 = key_module.Key("a", 1) + assert hash(key1) == hash(key1) + assert hash(key1) == hash(key1.pairs()) + key2 = key_module.Key("a", 2) + assert hash(key1) != hash(key2) + + @staticmethod + def test__tuple(): + key = key_module.Key("X", 11, app="foo", database="d", namespace="n") + assert key._tuple() == ("foo", "n", "d", (("X", 11),)) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test___eq__(): + key1 = key_module.Key("X", 11, app="foo", namespace="n") + key2 = key_module.Key("Y", 12, app="foo", namespace="n") + key3 = key_module.Key("X", 11, app="bar", namespace="n") + key4 = key_module.Key("X", 11, app="foo", namespace="m") + key5 = mock.sentinel.key + assert key1 == key1 + assert not key1 == key2 + assert not key1 == key3 + assert not key1 == key4 + assert not key1 == key5 + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test___ne__(): + key1 = key_module.Key("X", 11, app="foo", namespace="n") + key2 = key_module.Key("Y", 12, app="foo", namespace="n") + key3 = key_module.Key("X", 11, app="bar", namespace="n") + key4 = key_module.Key("X", 11, app="foo", namespace="m") + key5 = mock.sentinel.key + key6 = key_module.Key("X", 11, app="foo", namespace="n") + assert not key1 != key1 + assert key1 != key2 + assert key1 != key3 + assert key1 != key4 + assert key1 != key5 + assert not key1 != key6 + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test___lt__(): + key1 = key_module.Key("X", 11, app="foo", namespace="n") + key2 = key_module.Key("Y", 12, app="foo", namespace="n") + key3 = key_module.Key("X", 11, app="goo", namespace="n") + key4 = key_module.Key("X", 11, app="foo", namespace="o") + key5 = mock.sentinel.key + key6 = key_module.Key("X", 11, app="foo", database="db", namespace="n") + key7 = key_module.Key("X", 11, app="foo", database="db2", namespace="n") + assert not key1 < key1 + assert key1 < key2 + assert key1 < key3 + assert key1 < key4 + with pytest.raises(TypeError): + key1 < key5 + assert key1 < key6 + assert key6 < key7 + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test___le__(): + key1 = key_module.Key("X", 11, app="foo", namespace="n") + key2 = key_module.Key("Y", 12, app="foo", namespace="n") + key3 = key_module.Key("X", 11, app="goo", namespace="n") + key4 = key_module.Key("X", 11, app="foo", namespace="o") + key5 = mock.sentinel.key + key6 = key_module.Key("X", 11, app="foo", database="db", namespace="n") + key7 = key_module.Key("X", 11, app="foo", database="db2", namespace="n") + assert key1 <= key1 + assert key1 <= key2 + assert key1 <= key3 + assert key1 <= key4 + with pytest.raises(TypeError): + 
key1 <= key5 + assert key1 <= key6 + assert key6 <= key7 + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test___gt__(): + key1 = key_module.Key("X", 11, app="foo", namespace="n") + key2 = key_module.Key("M", 10, app="foo", namespace="n") + key3 = key_module.Key("X", 11, app="boo", namespace="n") + key4 = key_module.Key("X", 11, app="foo", namespace="a") + key5 = mock.sentinel.key + key6 = key_module.Key("X", 11, app="foo", database="db", namespace="n") + key7 = key_module.Key("X", 11, app="foo", database="db2", namespace="n") + assert not key1 > key1 + assert key1 > key2 + assert key1 > key3 + assert key1 > key4 + with pytest.raises(TypeError): + key1 > key5 + assert key6 > key1 + assert key7 > key6 + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test___ge__(): + key1 = key_module.Key("X", 11, app="foo", namespace="n") + key2 = key_module.Key("M", 10, app="foo", namespace="n") + key3 = key_module.Key("X", 11, app="boo", namespace="n") + key4 = key_module.Key("X", 11, app="foo", namespace="a") + key5 = mock.sentinel.key + key6 = key_module.Key("X", 11, app="foo", database="db", namespace="n") + key7 = key_module.Key("X", 11, app="foo", database="db2", namespace="n") + assert key1 >= key1 + assert key1 >= key2 + assert key1 >= key3 + assert key1 >= key4 + with pytest.raises(TypeError): + key1 >= key5 + assert key6 >= key1 + assert key7 >= key6 + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_pickling(): + key = key_module.Key("a", "b", app="c", namespace="d") + pickled = pickle.dumps(key) + unpickled = pickle.loads(pickled) + assert key == unpickled + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_pickling_with_default_database(): + key = key_module.Key("a", "b", app="c", namespace="d", database="") + pickled = pickle.dumps(key) + unpickled = pickle.loads(pickled) + assert key == unpickled + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_pickling_with_database(): + key = key_module.Key("a", "b", app="c", namespace="d", database="e") + pickled = pickle.dumps(key) + unpickled = pickle.loads(pickled) + assert key == unpickled + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test___setstate__bad_state(): + key = key_module.Key("a", "b") + + state = ("not", "length", "one") + with pytest.raises(TypeError): + key.__setstate__(state) + + state = ("not-a-dict",) + with pytest.raises(TypeError): + key.__setstate__(state) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_parent(): + key = key_module.Key("a", "b", "c", "d") + parent = key.parent() + assert parent._key == key._key.parent + assert parent._reference is None + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_parent_top_level(): + key = key_module.Key("This", "key") + assert key.parent() is None + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_root(): + key = key_module.Key("a", "b", "c", "d") + root = key.root() + assert root._key == key._key.parent + assert root._reference is None + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_root_top_level(): + key = key_module.Key("This", "key") + assert key.root() is key + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_namespace(): + namespace = "my-space" + key = key_module.Key("abc", 1, namespace=namespace) + assert key.namespace() == namespace + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_app(): + app = "s~example" + key = key_module.Key("X", 100, app=app) + 
assert key.app() != app + assert key.app() == app[2:] + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_id(): + for id_or_name in ("x", 11, None): + key = key_module.Key("Kind", id_or_name) + assert key.id() == id_or_name + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_string_id(): + pairs = (("x", "x"), (11, None), (None, None)) + for id_or_name, expected in pairs: + key = key_module.Key("Kind", id_or_name) + assert key.string_id() == expected + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_integer_id(): + pairs = (("x", None), (11, 11), (None, None)) + for id_or_name, expected in pairs: + key = key_module.Key("Kind", id_or_name) + assert key.integer_id() == expected + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_pairs(): + key = key_module.Key("a", "b") + assert key.pairs() == (("a", "b"),) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_pairs_partial_key(): + key = key_module.Key("This", "key", "that", None) + assert key.pairs() == (("This", "key"), ("that", None)) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_flat(): + key = key_module.Key("This", "key") + assert key.flat() == ("This", "key") + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_flat_partial_key(): + key = key_module.Key("Kind", None) + assert key.flat() == ("Kind", None) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_kind(): + key = key_module.Key("This", "key") + assert key.kind() == "This" + key = key_module.Key("a", "b", "c", "d") + assert key.kind() == "c" + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_reference(): + key = key_module.Key( + "This", "key", app="fire", database="db", namespace="namespace" + ) + assert key.reference() == make_reference( + path=({"type": "This", "name": "key"},), + app="fire", + database="db", + namespace="namespace", + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_reference_cached(): + key = key_module.Key("This", "key") + key._reference = mock.sentinel.reference + assert key.reference() is mock.sentinel.reference + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_reference_bad_kind(): + too_long = "a" * (key_module._MAX_KEYPART_BYTES + 1) + for kind in ("", too_long): + key = key_module.Key(kind, "key", app="app") + with pytest.raises(ValueError): + key.reference() + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_reference_bad_string_id(): + too_long = "a" * (key_module._MAX_KEYPART_BYTES + 1) + for id_ in ("", too_long): + key = key_module.Key("kind", id_, app="app") + with pytest.raises(ValueError): + key.reference() + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_reference_bad_integer_id(): + for id_ in (-10, 0, 2**64): + key = key_module.Key("kind", id_, app="app") + with pytest.raises(ValueError): + key.reference() + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_serialized(): + key = key_module.Key("a", 108, app="c") + assert key.serialized() == b"j\x01cr\x07\x0b\x12\x01a\x18l\x0c" + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_urlsafe(): + key = key_module.Key("d", None, app="f") + assert key.urlsafe() == b"agFmcgULEgFkDA" + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_to_legacy_urlsafe(): + key = key_module.Key("d", 123, app="f") + assert key.to_legacy_urlsafe(location_prefix="s~") == b"agNzfmZyBwsSAWQYeww" + + @staticmethod + 
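+    # to_legacy_urlsafe() emits the old App Engine token format; the caller
+    # passes the location prefix (e.g. "s~") explicitly, and named databases
+    # are rejected (see test_to_legacy_urlsafe_named_database_unsupported
+    # below).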
@pytest.mark.usefixtures("in_context") + def test_to_legacy_urlsafe_name(): + key = key_module.Key("d", "x", app="f") + assert key.to_legacy_urlsafe(location_prefix="s~") == b"agNzfmZyCAsSAWQiAXgM" + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_to_legacy_urlsafe_w_ancestor(): + """Regression test for #478. + + https://github.com/googleapis/python-ndb/issues/478 + """ + key = key_module.Key("d", 123, "e", 234, app="f") + urlsafe = key.to_legacy_urlsafe(location_prefix="s~") + key2 = key_module.Key(urlsafe=urlsafe) + assert key == key2 + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_to_legacy_urlsafe_named_database_unsupported(): + key = key_module.Key("d", 123, database="anydb") + with pytest.raises( + ValueError, match="to_legacy_urlsafe only supports the default database" + ): + key.to_legacy_urlsafe(location_prefix="s~") + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_api") + @mock.patch("google.cloud.ndb.model._entity_from_protobuf") + def test_get_with_cache_miss(_entity_from_protobuf, _datastore_api): + class Simple(model.Model): + pass + + ds_future = tasklets.Future() + ds_future.set_result("ds_entity") + _datastore_api.lookup.return_value = ds_future + _entity_from_protobuf.return_value = "the entity" + + key = key_module.Key("Simple", "b", app="c") + assert key.get(use_cache=True) == "the entity" + + _datastore_api.lookup.assert_called_once_with( + key._key, _options.ReadOptions(use_cache=True) + ) + _entity_from_protobuf.assert_called_once_with("ds_entity") + + @staticmethod + @mock.patch("google.cloud.ndb._datastore_api") + @mock.patch("google.cloud.ndb.model._entity_from_protobuf") + def test_get_with_cache_hit(_entity_from_protobuf, _datastore_api, in_context): + class Simple(model.Model): + pass + + ds_future = tasklets.Future() + ds_future.set_result("ds_entity") + _datastore_api.lookup.return_value = ds_future + _entity_from_protobuf.return_value = "the entity" + + key = key_module.Key("Simple", "b", app="c") + mock_cached_entity = mock.Mock(_key=key) + in_context.cache[key] = mock_cached_entity + assert key.get(use_cache=True) == mock_cached_entity + + _datastore_api.lookup.assert_not_called() + _entity_from_protobuf.assert_not_called() + + @staticmethod + @mock.patch("google.cloud.ndb._datastore_api") + @mock.patch("google.cloud.ndb.model._entity_from_protobuf") + def test_get_no_cache(_entity_from_protobuf, _datastore_api, in_context): + class Simple(model.Model): + pass + + ds_future = tasklets.Future() + ds_future.set_result("ds_entity") + _datastore_api.lookup.return_value = ds_future + _entity_from_protobuf.return_value = "the entity" + + key = key_module.Key("Simple", "b", app="c") + mock_cached_entity = mock.Mock(_key=key) + in_context.cache[key] = mock_cached_entity + assert key.get(use_cache=False) == "the entity" + + _datastore_api.lookup.assert_called_once_with( + key._key, _options.ReadOptions(use_cache=False) + ) + _entity_from_protobuf.assert_called_once_with("ds_entity") + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_api") + @mock.patch("google.cloud.ndb.model._entity_from_protobuf") + def test_get_w_hooks(_entity_from_protobuf, _datastore_api): + class Simple(model.Model): + pre_get_calls = [] + post_get_calls = [] + + @classmethod + def _pre_get_hook(cls, *args, **kwargs): + cls.pre_get_calls.append((args, kwargs)) + + @classmethod + def _post_get_hook(cls, key, future, *args, **kwargs): + assert 
isinstance(future, tasklets.Future) + cls.post_get_calls.append(((key,) + args, kwargs)) + + ds_future = tasklets.Future() + ds_future.set_result("ds_entity") + _datastore_api.lookup.return_value = ds_future + _entity_from_protobuf.return_value = "the entity" + + key = key_module.Key("Simple", 42) + assert key.get() == "the entity" + + _datastore_api.lookup.assert_called_once_with(key._key, _options.ReadOptions()) + _entity_from_protobuf.assert_called_once_with("ds_entity") + + assert Simple.pre_get_calls == [((key,), {})] + assert Simple.post_get_calls == [((key,), {})] + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_api") + @mock.patch("google.cloud.ndb.model._entity_from_protobuf") + def test_get_async(_entity_from_protobuf, _datastore_api): + ds_future = tasklets.Future() + _datastore_api.lookup.return_value = ds_future + _entity_from_protobuf.return_value = "the entity" + + key = key_module.Key("a", "b", app="c") + future = key.get_async() + ds_future.set_result("ds_entity") + assert future.result() == "the entity" + + _datastore_api.lookup.assert_called_once_with(key._key, _options.ReadOptions()) + _entity_from_protobuf.assert_called_once_with("ds_entity") + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_api") + def test_get_async_not_found(_datastore_api): + ds_future = tasklets.Future() + _datastore_api.lookup.return_value = ds_future + + key = key_module.Key("a", "b", app="c") + future = key.get_async() + ds_future.set_result(_datastore_api._NOT_FOUND) + assert future.result() is None + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_api") + def test_delete(_datastore_api): + class Simple(model.Model): + pass + + future = tasklets.Future() + _datastore_api.delete.return_value = future + future.set_result("result") + + key = key_module.Key("Simple", "b", app="c") + assert key.delete() == "result" + _datastore_api.delete.assert_called_once_with(key._key, _options.Options()) + + @staticmethod + @mock.patch("google.cloud.ndb._datastore_api") + def test_delete_with_cache(_datastore_api, in_context): + class Simple(model.Model): + pass + + future = tasklets.Future() + _datastore_api.delete.return_value = future + future.set_result("result") + + key = key_module.Key("Simple", "b", app="c") + mock_cached_entity = mock.Mock(_key=key) + in_context.cache[key] = mock_cached_entity + + assert key.delete(use_cache=True) == "result" + assert in_context.cache[key] is None + _datastore_api.delete.assert_called_once_with( + key._key, _options.Options(use_cache=True) + ) + + @staticmethod + @mock.patch("google.cloud.ndb._datastore_api") + def test_delete_no_cache(_datastore_api, in_context): + class Simple(model.Model): + pass + + future = tasklets.Future() + _datastore_api.delete.return_value = future + future.set_result("result") + + key = key_module.Key("Simple", "b", app="c") + mock_cached_entity = mock.Mock(_key=key) + in_context.cache[key] = mock_cached_entity + + assert key.delete(use_cache=False) == "result" + assert in_context.cache[key] == mock_cached_entity + _datastore_api.delete.assert_called_once_with( + key._key, _options.Options(use_cache=False) + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_api") + def test_delete_w_hooks(_datastore_api): + class Simple(model.Model): + pre_delete_calls = [] + post_delete_calls = [] + + @classmethod + def _pre_delete_hook(cls, *args, 
**kwargs): + cls.pre_delete_calls.append((args, kwargs)) + + @classmethod + def _post_delete_hook(cls, key, future, *args, **kwargs): + assert isinstance(future, tasklets.Future) + cls.post_delete_calls.append(((key,) + args, kwargs)) + + future = tasklets.Future() + _datastore_api.delete.return_value = future + future.set_result("result") + + key = key_module.Key("Simple", 42) + assert key.delete() == "result" + _datastore_api.delete.assert_called_once_with(key._key, _options.Options()) + + assert Simple.pre_delete_calls == [((key,), {})] + assert Simple.post_delete_calls == [((key,), {})] + + @staticmethod + @mock.patch("google.cloud.ndb._datastore_api") + def test_delete_in_transaction(_datastore_api, in_context): + future = tasklets.Future() + _datastore_api.delete.return_value = future + + with in_context.new(transaction=b"tx123").use(): + key = key_module.Key("a", "b", app="c") + assert key.delete() is None + _datastore_api.delete.assert_called_once_with(key._key, _options.Options()) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_api") + def test_delete_async(_datastore_api): + key = key_module.Key("a", "b", app="c") + + future = tasklets.Future() + _datastore_api.delete.return_value = future + future.set_result("result") + + result = key.delete_async().get_result() + + _datastore_api.delete.assert_called_once_with(key._key, _options.Options()) + assert result == "result" + + @staticmethod + def test_from_old_key(): + with pytest.raises(NotImplementedError): + key_module.Key.from_old_key(None) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_to_old_key(): + key = key_module.Key("a", "b") + with pytest.raises(NotImplementedError): + key.to_old_key() + + +class Test__project_from_app: + @staticmethod + def test_already_clean(): + app = "my-prahjekt" + assert key_module._project_from_app(app) == app + + @staticmethod + def test_prefixed(): + project = "my-prahjekt" + for prefix in ("s", "e", "dev"): + app = "{}~{}".format(prefix, project) + assert key_module._project_from_app(app) == project + + @staticmethod + def test_app_fallback(context): + context.client.project = "s~jectpro" + with context.use(): + assert key_module._project_from_app(None) == "jectpro" + + +class Test__from_reference: + def test_basic(self): + reference = make_reference() + ds_key = key_module._from_reference(reference, None, None, None) + assert ds_key == google.cloud.datastore.Key( + "Parent", + 59, + "Child", + "Feather", + project="sample-app", + database="base", + namespace="space", + ) + + def test_matching_app(self): + reference = make_reference() + ds_key = key_module._from_reference(reference, "s~sample-app", None, None) + assert ds_key == google.cloud.datastore.Key( + "Parent", + 59, + "Child", + "Feather", + project="sample-app", + database="base", + namespace="space", + ) + + def test_differing_app(self): + reference = make_reference() + with pytest.raises(RuntimeError): + key_module._from_reference(reference, "pickles", None, None) + + def test_matching_namespace(self): + reference = make_reference() + ds_key = key_module._from_reference(reference, None, "space", None) + assert ds_key == google.cloud.datastore.Key( + "Parent", + 59, + "Child", + "Feather", + project="sample-app", + database="base", + namespace="space", + ) + + def test_differing_namespace(self): + reference = make_reference() + with pytest.raises(RuntimeError): + key_module._from_reference(reference, None, "pickles", None) + + def test_matching_database(self): 
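+        # _from_reference accepts explicit app/namespace/database arguments
+        # only when they match what the Reference itself encodes; the
+        # "differing_*" cases in this class show a mismatch raising
+        # RuntimeError.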
+ reference = make_reference() + ds_key = key_module._from_reference(reference, None, None, "base") + assert ds_key == google.cloud.datastore.Key( + "Parent", + 59, + "Child", + "Feather", + project="sample-app", + database="base", + namespace="space", + ) + + def test_differing_database(self): + reference = make_reference() + with pytest.raises(RuntimeError): + key_module._from_reference(reference, None, None, "turtles") + + +class Test__from_serialized: + @staticmethod + def test_basic(): + serialized = b'j\x0cs~sample-appr\x1e\x0b\x12\x06Parent\x18;\x0c\x0b\x12\x05Child"\x07Feather\x0c\xa2\x01\x05space\xba\x01\x04base' + ds_key, reference = key_module._from_serialized(serialized, None, None, None) + assert ds_key == google.cloud.datastore.Key( + "Parent", + 59, + "Child", + "Feather", + project="sample-app", + database="base", + namespace="space", + ) + assert reference == make_reference() + + @staticmethod + def test_no_app_prefix(): + serialized = b"j\x18s~sample-app-no-locationr\n\x0b\x12\x04Zorp\x18X\x0c" + ds_key, reference = key_module._from_serialized(serialized, None, None, None) + assert ds_key == google.cloud.datastore.Key( + "Zorp", 88, project="sample-app-no-location" + ) + assert reference == make_reference( + path=({"type": "Zorp", "id": 88},), + app="s~sample-app-no-location", + database=None, + namespace=None, + ) + + +class Test__from_urlsafe: + @staticmethod + def test_basic(): + urlsafe = ( + "agxzfnNhbXBsZS1hcHByHgsSBlBhcmVudBg7DAsSBUNoaWxkIgdGZ" + "WF0aGVyDKIBBXNwYWNl" + ) + urlsafe_bytes = urlsafe.encode("ascii") + for value in (urlsafe, urlsafe_bytes): + ds_key, reference = key_module._from_urlsafe(value, None, None, None) + assert ds_key == google.cloud.datastore.Key( + "Parent", + 59, + "Child", + "Feather", + project="sample-app", + database=None, + namespace="space", + ) + assert reference == make_reference(database=None) + + @staticmethod + def test_needs_padding(): + urlsafe = b"agZzfmZpcmVyDwsSBEtpbmQiBVRoaW5nDA" + + ds_key, reference = key_module._from_urlsafe(urlsafe, None, None, None) + assert ds_key == google.cloud.datastore.Key("Kind", "Thing", project="fire") + assert reference == make_reference( + path=({"type": "Kind", "name": "Thing"},), + app="s~fire", + database=None, + namespace=None, + ) + + +class Test__constructor_handle_positional: + @staticmethod + def test_with_path(): + args = ("Kind", 1) + kwargs = {} + key_module._constructor_handle_positional(args, kwargs) + assert kwargs == {"flat": args} + + @staticmethod + def test_path_collide_flat(): + args = ("Kind", 1) + kwargs = {"flat": ("OtherKind", "Cheese")} + with pytest.raises(TypeError): + key_module._constructor_handle_positional(args, kwargs) + + @staticmethod + def test_dict_positional(): + args = ({"flat": ("OtherKind", "Cheese"), "app": "ehp"},) + kwargs = {} + key_module._constructor_handle_positional(args, kwargs) + assert kwargs == args[0] + + @staticmethod + def test_dict_positional_with_other_kwargs(): + args = ({"flat": ("OtherKind", "Cheese"), "app": "ehp"},) + kwargs = {"namespace": "over-here", "database": "over-there"} + with pytest.raises(TypeError): + key_module._constructor_handle_positional(args, kwargs) + + +def make_reference( + path=({"type": "Parent", "id": 59}, {"type": "Child", "name": "Feather"}), + app="s~sample-app", + database="base", + namespace="space", +): + elements = [_app_engine_key_pb2.Path.Element(**element) for element in path] + return _app_engine_key_pb2.Reference( + app=app, + path=_app_engine_key_pb2.Path(element=elements), + 
database_id=database, + name_space=namespace, + ) diff --git a/packages/google-cloud-ndb/tests/unit/test_metadata.py b/packages/google-cloud-ndb/tests/unit/test_metadata.py new file mode 100644 index 000000000000..a3aa5c85f8ab --- /dev/null +++ b/packages/google-cloud-ndb/tests/unit/test_metadata.py @@ -0,0 +1,389 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from unittest import mock + +import pytest + +from google.cloud.ndb import exceptions +from google.cloud.ndb import metadata +from google.cloud.ndb import key as key_module +from google.cloud.ndb import tasklets + +from . import utils + + +def test___all__(): + utils.verify___all__(metadata) + + +class Test_BaseMetadata: + @staticmethod + def test_get_kind(): + kind = metadata._BaseMetadata.KIND_NAME + assert metadata._BaseMetadata._get_kind() == kind + + @staticmethod + def test_cannot_instantiate(): + with pytest.raises(TypeError): + metadata._BaseMetadata() + + +class TestEntityGroup: + @staticmethod + def test_constructor(): + with pytest.raises(exceptions.NoLongerImplementedError): + metadata.EntityGroup() + + +class TestKind: + @staticmethod + def test_get_kind(): + kind = metadata.Kind.KIND_NAME + assert metadata.Kind._get_kind() == kind + + @staticmethod + def test_constructor(): + kind = metadata.Kind() + assert kind.__dict__ == {"_values": {}} + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_key_for_kind(): + key = key_module.Key(metadata.Kind.KIND_NAME, "test") + assert key == metadata.Kind.key_for_kind("test") + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_key_to_kind(): + key = key_module.Key(metadata.Kind.KIND_NAME, "test") + assert metadata.Kind.key_to_kind(key) == "test" + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_kind_name(): + key = key_module.Key(metadata.Kind.KIND_NAME, "test") + kind = metadata.Kind(key=key) + assert kind.kind_name == "test" + + +class TestNamespace: + @staticmethod + def test_get_kind(): + kind = metadata.Namespace.KIND_NAME + assert metadata.Namespace._get_kind() == kind + + @staticmethod + def test_constructor(): + namespace = metadata.Namespace() + assert namespace.__dict__ == {"_values": {}} + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_key_for_namespace(): + key = key_module.Key(metadata.Namespace.KIND_NAME, "test") + assert key == metadata.Namespace.key_for_namespace("test") + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_key_for_namespace_default(): + key = key_module.Key(metadata.Namespace.KIND_NAME, "") + assert key == metadata.Namespace.key_for_namespace("") + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_key_for_namespace_empty(): + key = key_module.Key( + metadata.Namespace.KIND_NAME, metadata.Namespace.EMPTY_NAMESPACE_ID + ) + assert key == metadata.Namespace.key_for_namespace(None) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_key_to_namespace(): + key = 
key_module.Key(metadata.Namespace.KIND_NAME, "test") + assert metadata.Namespace.key_to_namespace(key) == "test" + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_namespace_name(): + key = key_module.Key(metadata.Namespace.KIND_NAME, "test") + namespace = metadata.Namespace(key=key) + assert namespace.namespace_name == "test" + + +class TestProperty: + @staticmethod + def test_get_kind(): + kind = metadata.Property.KIND_NAME + assert metadata.Property._get_kind() == kind + + @staticmethod + def test_constructor(): + property = metadata.Property() + assert property.__dict__ == {"_values": {}} + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_key_for_kind(): + key = key_module.Key(metadata.Kind.KIND_NAME, "test") + assert key == metadata.Property.key_for_kind("test") + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_key_to_kind(): + kind = key_module.Key(metadata.Kind.KIND_NAME, "test") + assert metadata.Property.key_to_kind(kind) == "test" + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_kind_name(): + key = key_module.Key( + metadata.Kind.KIND_NAME, + "test", + metadata.Property.KIND_NAME, + "test2", + ) + property = metadata.Property(key=key) + assert property.kind_name == "test" + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_key_for_property(): + key = key_module.Key( + metadata.Kind.KIND_NAME, + "test", + metadata.Property.KIND_NAME, + "test2", + ) + assert key == metadata.Property.key_for_property("test", "test2") + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_key_to_property(): + kind = key_module.Key(metadata.Property.KIND_NAME, "test") + assert metadata.Property.key_to_property(kind) == "test" + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_key_to_property_only_kind(): + kind = key_module.Key(metadata.Kind.KIND_NAME, "test") + assert metadata.Property.key_to_property(kind) is None + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_property_name(): + key = key_module.Key( + metadata.Kind.KIND_NAME, + "test", + metadata.Property.KIND_NAME, + "test2", + ) + property = metadata.Property(key=key) + assert property.property_name == "test2" + + +@pytest.mark.usefixtures("in_context") +def test_get_entity_group_version(*args, **kwargs): + with pytest.raises(exceptions.NoLongerImplementedError): + metadata.get_entity_group_version() + + +@pytest.mark.usefixtures("in_context") +@mock.patch("google.cloud.ndb._datastore_query") +def test_get_kinds(_datastore_query): + future = tasklets.Future("fetch") + future.set_result([]) + _datastore_query.fetch.return_value = future + kinds = metadata.get_kinds() + assert kinds == [] + + +@pytest.mark.usefixtures("in_context") +@mock.patch("google.cloud.ndb._datastore_query") +@mock.patch("google.cloud.ndb.query.Query") +def test_get_kinds_with_start(Query, _datastore_query): + future = tasklets.Future("fetch") + future.set_result([]) + _datastore_query.fetch.return_value = future + query = Query.return_value + kinds = metadata.get_kinds(start="a") + assert kinds == [] + query.filter.assert_called_once() + + +@pytest.mark.usefixtures("in_context") +@mock.patch("google.cloud.ndb._datastore_query") +@mock.patch("google.cloud.ndb.query.Query") +def test_get_kinds_with_end(Query, _datastore_query): + future = tasklets.Future("fetch") + future.set_result([]) + _datastore_query.fetch.return_value = future + query = Query.return_value + kinds = metadata.get_kinds(end="z") + assert kinds == [] + 
query.filter.assert_called_once() + + +@pytest.mark.usefixtures("in_context") +@mock.patch("google.cloud.ndb._datastore_query") +def test_get_kinds_empty_end(_datastore_query): + future = tasklets.Future("fetch") + future.set_result(["not", "empty"]) + _datastore_query.fetch.return_value = future + kinds = metadata.get_kinds(end="") + assert kinds == [] + + +@pytest.mark.usefixtures("in_context") +@mock.patch("google.cloud.ndb._datastore_query") +def test_get_namespaces(_datastore_query): + future = tasklets.Future("fetch") + future.set_result([]) + _datastore_query.fetch.return_value = future + names = metadata.get_namespaces() + assert names == [] + + +@pytest.mark.usefixtures("in_context") +@mock.patch("google.cloud.ndb._datastore_query") +@mock.patch("google.cloud.ndb.query.Query") +def test_get_namespaces_with_start(Query, _datastore_query): + future = tasklets.Future("fetch") + future.set_result([]) + _datastore_query.fetch.return_value = future + query = Query.return_value + names = metadata.get_namespaces(start="a") + assert names == [] + query.filter.assert_called_once() + + +@pytest.mark.usefixtures("in_context") +@mock.patch("google.cloud.ndb._datastore_query") +@mock.patch("google.cloud.ndb.query.Query") +def test_get_namespaces_with_end(Query, _datastore_query): + future = tasklets.Future("fetch") + future.set_result([]) + _datastore_query.fetch.return_value = future + query = Query.return_value + names = metadata.get_namespaces(end="z") + assert names == [] + query.filter.assert_called_once() + + +@pytest.mark.usefixtures("in_context") +@mock.patch("google.cloud.ndb._datastore_query") +def test_get_properties_of_kind(_datastore_query): + future = tasklets.Future("fetch") + future.set_result([]) + _datastore_query.fetch.return_value = future + props = metadata.get_properties_of_kind("AnyKind") + assert props == [] + + +@pytest.mark.usefixtures("in_context") +@mock.patch("google.cloud.ndb._datastore_query") +@mock.patch("google.cloud.ndb.query.Query") +def test_get_properties_of_kind_with_start(Query, _datastore_query): + future = tasklets.Future("fetch") + future.set_result([]) + _datastore_query.fetch.return_value = future + query = Query.return_value + props = metadata.get_properties_of_kind("AnyKind", start="a") + assert props == [] + query.filter.assert_called_once() + + +@pytest.mark.usefixtures("in_context") +@mock.patch("google.cloud.ndb._datastore_query") +@mock.patch("google.cloud.ndb.query.Query") +def test_get_properties_of_kind_with_end(Query, _datastore_query): + future = tasklets.Future("fetch") + future.set_result([]) + _datastore_query.fetch.return_value = future + query = Query.return_value + props = metadata.get_properties_of_kind("AnyKind", end="z") + assert props == [] + query.filter.assert_called_once() + + +@pytest.mark.usefixtures("in_context") +@mock.patch("google.cloud.ndb._datastore_query") +def test_get_properties_of_kind_empty_end(_datastore_query): + future = tasklets.Future("fetch") + future.set_result(["not", "empty"]) + _datastore_query.fetch.return_value = future + props = metadata.get_properties_of_kind("AnyKind", end="") + assert props == [] + + +@pytest.mark.usefixtures("in_context") +@mock.patch("google.cloud.ndb._datastore_query") +def test_get_representations_of_kind(_datastore_query): + future = tasklets.Future("fetch") + future.set_result([]) + _datastore_query.fetch.return_value = future + reps = metadata.get_representations_of_kind("AnyKind") + assert reps == {} + + +@pytest.mark.usefixtures("in_context") 
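+ # MyProp below stands in for a row returned by a __property__ query; it
+ # exposes only the two attributes get_representations_of_kind reads:
+ # property_name and property_representation.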
+@mock.patch("google.cloud.ndb._datastore_query") +def test_get_representations_of_kind_with_results(_datastore_query): + class MyProp: + property_name = "myprop" + property_representation = "STR" + + myprop = MyProp() + future = tasklets.Future("fetch") + future.set_result([myprop]) + _datastore_query.fetch.return_value = future + reps = metadata.get_representations_of_kind("MyModel") + assert reps == {"myprop": "STR"} + + +@pytest.mark.usefixtures("in_context") +@mock.patch("google.cloud.ndb._datastore_query") +@mock.patch("google.cloud.ndb.query.Query") +def test_get_representations_of_kind_with_start(Query, _datastore_query): + future = tasklets.Future("fetch") + future.set_result([]) + _datastore_query.fetch.return_value = future + query = Query.return_value + reps = metadata.get_representations_of_kind("AnyKind", start="a") + assert reps == {} + query.filter.assert_called_once() + + +@pytest.mark.usefixtures("in_context") +@mock.patch("google.cloud.ndb._datastore_query") +@mock.patch("google.cloud.ndb.query.Query") +def test_get_representations_of_kind_with_end(Query, _datastore_query): + future = tasklets.Future("fetch") + future.set_result([]) + _datastore_query.fetch.return_value = future + query = Query.return_value + reps = metadata.get_representations_of_kind("AnyKind", end="z") + assert reps == {} + query.filter.assert_called_once() + + +@pytest.mark.usefixtures("in_context") +@mock.patch("google.cloud.ndb._datastore_query") +def test_get_representations_of_kind_empty_end(_datastore_query): + future = tasklets.Future("fetch") + future.set_result([]) + _datastore_query.fetch.return_value = future + reps = metadata.get_representations_of_kind("AnyKind", end="") + assert reps == {} diff --git a/packages/google-cloud-ndb/tests/unit/test_model.py b/packages/google-cloud-ndb/tests/unit/test_model.py new file mode 100644 index 000000000000..14ae8efbe610 --- /dev/null +++ b/packages/google-cloud-ndb/tests/unit/test_model.py @@ -0,0 +1,6659 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import datetime +import pickle +import pytz +import types +import zlib + +from unittest import mock + +from google.cloud import datastore +from google.cloud.datastore import entity as entity_module +from google.cloud.datastore import key as ds_key_module +from google.cloud.datastore import helpers +from google.cloud.datastore_v1 import types as ds_types +from google.cloud.datastore_v1.types import entity as entity_pb2 +import pytest + +from google.cloud.ndb import _datastore_types +from google.cloud.ndb import exceptions +from google.cloud.ndb import key as key_module +from google.cloud.ndb import model +from google.cloud.ndb import _options +from google.cloud.ndb import polymodel +from google.cloud.ndb import query as query_module +from google.cloud.ndb import tasklets +from google.cloud.ndb import utils as ndb_utils +from google.cloud.ndb import _legacy_entity_pb + +from . 
import utils + + +class timezone(datetime.tzinfo): + def __init__(self, offset): + self.offset = datetime.timedelta(hours=offset) + + def utcoffset(self, dt): + return self.offset + + def dst(self, dt): + return datetime.timedelta(0) + + def __eq__(self, other): + return self.offset == other.offset + + +def test___all__(): + utils.verify___all__(model) + + +def test_Key(): + assert model.Key is key_module.Key + + +def test_BlobKey(): + assert model.BlobKey is _datastore_types.BlobKey + + +def test_GeoPt(): + assert model.GeoPt is helpers.GeoPoint + + +class TestIndexProperty: + @staticmethod + def test_constructor(): + index_prop = model.IndexProperty(name="a", direction="asc") + assert index_prop._name == "a" + assert index_prop._direction == "asc" + + @staticmethod + def test_name(): + index_prop = model.IndexProperty(name="b", direction="asc") + assert index_prop.name == "b" + + @staticmethod + def test_direction(): + index_prop = model.IndexProperty(name="a", direction="desc") + assert index_prop.direction == "desc" + + @staticmethod + def test___repr__(): + index_prop = model.IndexProperty(name="c", direction="asc") + assert repr(index_prop) == "IndexProperty(name='c', direction='asc')" + + @staticmethod + def test___eq__(): + index_prop1 = model.IndexProperty(name="d", direction="asc") + index_prop2 = model.IndexProperty(name="d", direction="desc") + index_prop3 = mock.sentinel.index_prop + assert index_prop1 == index_prop1 + assert not index_prop1 == index_prop2 + assert not index_prop1 == index_prop3 + + @staticmethod + def test___ne__(): + index_prop1 = model.IndexProperty(name="d", direction="asc") + index_prop2 = model.IndexProperty(name="d", direction="desc") + index_prop3 = mock.sentinel.index_prop + index_prop4 = model.IndexProperty(name="d", direction="asc") + assert not index_prop1 != index_prop1 + assert index_prop1 != index_prop2 + assert index_prop1 != index_prop3 + assert not index_prop1 != index_prop4 + + @staticmethod + def test___hash__(): + index_prop1 = model.IndexProperty(name="zip", direction="asc") + index_prop2 = model.IndexProperty(name="zip", direction="asc") + assert index_prop1 is not index_prop2 + assert hash(index_prop1) == hash(index_prop2) + assert hash(index_prop1) == hash(("zip", "asc")) + + +class TestIndex: + @staticmethod + def test_constructor(): + index_prop = model.IndexProperty(name="a", direction="asc") + index = model.Index(kind="IndK", properties=(index_prop,), ancestor=False) + assert index._kind == "IndK" + assert index._properties == (index_prop,) + assert not index._ancestor + + @staticmethod + def test_kind(): + index = model.Index(kind="OK", properties=(), ancestor=False) + assert index.kind == "OK" + + @staticmethod + def test_properties(): + index_prop1 = model.IndexProperty(name="a", direction="asc") + index_prop2 = model.IndexProperty(name="b", direction="desc") + index = model.Index( + kind="F", properties=(index_prop1, index_prop2), ancestor=False + ) + assert index.properties == (index_prop1, index_prop2) + + @staticmethod + def test_ancestor(): + index = model.Index(kind="LK", properties=(), ancestor=True) + assert index.ancestor + + @staticmethod + def test___repr__(): + index_prop = model.IndexProperty(name="a", direction="asc") + index = model.Index(kind="IndK", properties=[index_prop], ancestor=False) + expected = "Index(kind='IndK', properties=[{!r}], ancestor=False)" + expected = expected.format(index_prop) + assert repr(index) == expected + + @staticmethod + def test___eq__(): + index_props = 
(model.IndexProperty(name="a", direction="asc"),) + index1 = model.Index(kind="d", properties=index_props, ancestor=False) + index2 = model.Index(kind="d", properties=(), ancestor=False) + index3 = model.Index(kind="d", properties=index_props, ancestor=True) + index4 = model.Index(kind="e", properties=index_props, ancestor=False) + index5 = mock.sentinel.index + assert index1 == index1 + assert not index1 == index2 + assert not index1 == index3 + assert not index1 == index4 + assert not index1 == index5 + + @staticmethod + def test___ne__(): + index_props = (model.IndexProperty(name="a", direction="asc"),) + index1 = model.Index(kind="d", properties=index_props, ancestor=False) + index2 = model.Index(kind="d", properties=(), ancestor=False) + index3 = model.Index(kind="d", properties=index_props, ancestor=True) + index4 = model.Index(kind="e", properties=index_props, ancestor=False) + index5 = mock.sentinel.index + index6 = model.Index(kind="d", properties=index_props, ancestor=False) + assert not index1 != index1 + assert index1 != index2 + assert index1 != index3 + assert index1 != index4 + assert index1 != index5 + assert not index1 != index6 + + @staticmethod + def test___hash__(): + index_props = (model.IndexProperty(name="a", direction="asc"),) + index1 = model.Index(kind="d", properties=index_props, ancestor=False) + index2 = model.Index(kind="d", properties=index_props, ancestor=False) + assert index1 is not index2 + assert hash(index1) == hash(index2) + assert hash(index1) == hash(("d", index_props, False)) + + +class TestIndexState: + INDEX = mock.sentinel.index + + def test_constructor(self): + index_state = model.IndexState(definition=self.INDEX, state="error", id=42) + assert index_state._definition is self.INDEX + assert index_state._state == "error" + assert index_state._id == 42 + + def test_definition(self): + index_state = model.IndexState(definition=self.INDEX, state="serving", id=1) + assert index_state.definition is self.INDEX + + @staticmethod + def test_state(): + index_state = model.IndexState(definition=None, state="deleting", id=1) + assert index_state.state == "deleting" + + @staticmethod + def test_id(): + index_state = model.IndexState(definition=None, state="error", id=1001) + assert index_state.id == 1001 + + @staticmethod + def test___repr__(): + index_prop = model.IndexProperty(name="a", direction="asc") + index = model.Index(kind="IndK", properties=[index_prop], ancestor=False) + index_state = model.IndexState(definition=index, state="building", id=1337) + expected = ( + "IndexState(definition=Index(kind='IndK', properties=[" + "IndexProperty(name='a', direction='asc')], ancestor=False), " + "state='building', id=1337)" + ) + assert repr(index_state) == expected + + def test___eq__(self): + index_state1 = model.IndexState(definition=self.INDEX, state="error", id=20) + index_state2 = model.IndexState( + definition=mock.sentinel.not_index, state="error", id=20 + ) + index_state3 = model.IndexState(definition=self.INDEX, state="serving", id=20) + index_state4 = model.IndexState(definition=self.INDEX, state="error", id=80) + index_state5 = mock.sentinel.index_state + assert index_state1 == index_state1 + assert not index_state1 == index_state2 + assert not index_state1 == index_state3 + assert not index_state1 == index_state4 + assert not index_state1 == index_state5 + + def test___ne__(self): + index_state1 = model.IndexState(definition=self.INDEX, state="error", id=20) + index_state2 = model.IndexState( + definition=mock.sentinel.not_index, state="error", 
id=20 + ) + index_state3 = model.IndexState(definition=self.INDEX, state="serving", id=20) + index_state4 = model.IndexState(definition=self.INDEX, state="error", id=80) + index_state5 = mock.sentinel.index_state + index_state6 = model.IndexState(definition=self.INDEX, state="error", id=20) + assert not index_state1 != index_state1 + assert index_state1 != index_state2 + assert index_state1 != index_state3 + assert index_state1 != index_state4 + assert index_state1 != index_state5 + assert not index_state1 != index_state6 + + def test___hash__(self): + index_state1 = model.IndexState(definition=self.INDEX, state="error", id=88) + index_state2 = model.IndexState(definition=self.INDEX, state="error", id=88) + assert index_state1 is not index_state2 + assert hash(index_state1) == hash(index_state2) + assert hash(index_state1) == hash((self.INDEX, "error", 88)) + + +class TestModelAdapter: + @staticmethod + def test_constructor(): + with pytest.raises(NotImplementedError): + model.ModelAdapter() + + +def test_make_connection(): + with pytest.raises(NotImplementedError): + model.make_connection() + + +class TestModelAttribute: + @staticmethod + def test_constructor(): + attr = model.ModelAttribute() + assert isinstance(attr, model.ModelAttribute) + + @staticmethod + def test__fix_up(): + attr = model.ModelAttribute() + assert attr._fix_up(model.Model, "birthdate") is None + + +class Test_BaseValue: + @staticmethod + def test_constructor(): + wrapped = model._BaseValue(17) + assert wrapped.b_val == 17 + + @staticmethod + def test_constructor_invalid_input(): + with pytest.raises(TypeError): + model._BaseValue(None) + with pytest.raises(TypeError): + model._BaseValue([1, 2]) + + @staticmethod + def test___repr__(): + wrapped = model._BaseValue("abc") + assert repr(wrapped) == "_BaseValue('abc')" + + @staticmethod + def test___eq__(): + wrapped1 = model._BaseValue("one val") + wrapped2 = model._BaseValue(25.5) + wrapped3 = mock.sentinel.base_value + assert wrapped1 == wrapped1 + assert not wrapped1 == wrapped2 + assert not wrapped1 == wrapped3 + + @staticmethod + def test___ne__(): + wrapped1 = model._BaseValue("one val") + wrapped2 = model._BaseValue(25.5) + wrapped3 = mock.sentinel.base_value + wrapped4 = model._BaseValue("one val") + assert not wrapped1 != wrapped1 + assert wrapped1 != wrapped2 + assert wrapped1 != wrapped3 + assert not wrapped1 != wrapped4 + + @staticmethod + def test___hash__(): + wrapped = model._BaseValue((11, 12, 88)) + with pytest.raises(TypeError): + hash(wrapped) + + +class TestProperty: + @staticmethod + def test_constructor_defaults(): + prop = model.Property() + # Check that none of the constructor defaults were used. 
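+ # An empty __dict__ means every option kept its class-level default;
+ # Property stores an option on the instance only when the caller
+ # overrides it, e.g. model.Property(indexed=False).__dict__ should be
+ # {"_indexed": False} (compare test_constructor_explicit below).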
+ assert prop.__dict__ == {} + + @staticmethod + def _example_validator(prop, value): + return value.lower() + + def test__example_validator(self): + value = "AbCde" + validated = self._example_validator(None, value) + assert validated == "abcde" + assert self._example_validator(None, validated) == "abcde" + + def test_constructor_explicit(self): + prop = model.Property( + name="val", + indexed=False, + repeated=False, + required=True, + default="zorp", + choices=("zorp", "zap", "zip"), + validator=self._example_validator, + verbose_name="VALUE FOR READING", + write_empty_list=False, + ) + assert prop._name == "val" + assert not prop._indexed + assert not prop._repeated + assert prop._required + assert prop._default == "zorp" + assert prop._choices == frozenset(("zorp", "zap", "zip")) + assert prop._validator is self._example_validator + assert prop._verbose_name == "VALUE FOR READING" + assert not prop._write_empty_list + + @staticmethod + def test_constructor_invalid_name(): + with pytest.raises(TypeError): + model.Property(name=["not", "a", "string"]) + with pytest.raises(ValueError): + model.Property(name="has.a.dot") + + @staticmethod + def test_constructor_repeated_not_allowed(): + with pytest.raises(ValueError): + model.Property(name="a", repeated=True, required=True) + with pytest.raises(ValueError): + model.Property(name="b", repeated=True, default="zim") + + @staticmethod + def test_constructor_invalid_choices(): + with pytest.raises(TypeError): + model.Property(name="a", choices={"wrong": "container"}) + + @staticmethod + def test_constructor_invalid_validator(): + with pytest.raises(TypeError): + model.Property(name="a", validator=mock.sentinel.validator) + + def test_repr(self): + prop = model.Property( + "val", + indexed=False, + repeated=False, + required=True, + default="zorp", + choices=("zorp", "zap", "zip"), + validator=self._example_validator, + verbose_name="VALUE FOR READING", + write_empty_list=False, + ) + expected = ( + "Property('val', indexed=False, required=True, " + "default='zorp', choices={}, validator={}, " + "verbose_name='VALUE FOR READING')".format(prop._choices, prop._validator) + ) + assert repr(prop) == expected + + @staticmethod + def test_repr_subclass(): + class SimpleProperty(model.Property): + _foo_type = None + _bar = "eleventy" + + @ndb_utils.positional(1) + def __init__(self, foo_type, bar): + self._foo_type = foo_type + self._bar = bar + + prop = SimpleProperty(foo_type=list, bar="nope") + assert repr(prop) == "SimpleProperty(foo_type=list, bar='nope')" + + @staticmethod + def test__datastore_type(): + prop = model.Property("foo") + value = mock.sentinel.value + assert prop._datastore_type(value) is value + + @staticmethod + def test__comparison_indexed(): + prop = model.Property("color", indexed=False) + with pytest.raises(exceptions.BadFilterError): + prop._comparison("!=", "red") + + @staticmethod + def test__comparison(): + prop = model.Property("sentiment", indexed=True) + filter_node = prop._comparison(">=", 0.0) + assert filter_node == query_module.FilterNode("sentiment", ">=", 0.0) + + @staticmethod + def test__comparison_empty_value(): + prop = model.Property("height", indexed=True) + filter_node = prop._comparison("=", None) + assert filter_node == query_module.FilterNode("height", "=", None) + # Cache is untouched. 
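+ # Comparing against None bypasses value validation, so _find_methods
+ # (which fills this class-level cache) should never have run.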
+ assert model.Property._FIND_METHODS_CACHE == {} + + @staticmethod + def test___eq__(): + prop = model.Property("name", indexed=True) + value = 1337 + expected = query_module.FilterNode("name", "=", value) + + filter_node_left = prop == value + assert filter_node_left == expected + filter_node_right = value == prop + assert filter_node_right == expected + + @staticmethod + def test___ne__(): + prop = model.Property("name", indexed=True) + value = 7.0 + expected = query_module.FilterNode("name", "!=", value) + + ne_node_left = prop != value + assert ne_node_left == expected + ne_node_right = value != prop + assert ne_node_right == expected + + @staticmethod + def test___lt__(): + prop = model.Property("name", indexed=True) + value = 2.0 + expected = query_module.FilterNode("name", "<", value) + + filter_node_left = prop < value + assert filter_node_left == expected + filter_node_right = value > prop + assert filter_node_right == expected + + @staticmethod + def test___le__(): + prop = model.Property("name", indexed=True) + value = 20.0 + expected = query_module.FilterNode("name", "<=", value) + + filter_node_left = prop <= value + assert filter_node_left == expected + filter_node_right = value >= prop + assert filter_node_right == expected + + @staticmethod + def test___gt__(): + prop = model.Property("name", indexed=True) + value = "new" + expected = query_module.FilterNode("name", ">", value) + + filter_node_left = prop > value + assert filter_node_left == expected + filter_node_right = value < prop + assert filter_node_right == expected + + @staticmethod + def test___ge__(): + prop = model.Property("name", indexed=True) + value = "old" + expected = query_module.FilterNode("name", ">=", value) + + filter_node_left = prop >= value + assert filter_node_left == expected + filter_node_right = value <= prop + assert filter_node_right == expected + + @staticmethod + def test__IN_not_indexed(): + prop = model.Property("name", indexed=False) + with pytest.raises(exceptions.BadFilterError): + prop._IN([10, 20, 81]) + + # Cache is untouched. + assert model.Property._FIND_METHODS_CACHE == {} + + @staticmethod + def test__IN_wrong_container(): + prop = model.Property("name", indexed=True) + with pytest.raises(exceptions.BadArgumentError): + prop._IN({1: "a", 11: "b"}) + + # Cache is untouched. 
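+ # The dict fails the container-type check before any element is
+ # validated, so the method-resolution cache stays empty.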
+ assert model.Property._FIND_METHODS_CACHE == {} + + @staticmethod + def test__IN_default(): + prop = model.Property("name", indexed=True) + or_node = prop._IN(["a", None, "xy"]) + expected = query_module.DisjunctionNode( + query_module.FilterNode("name", "=", "a"), + query_module.FilterNode("name", "=", None), + query_module.FilterNode("name", "=", "xy"), + ) + assert or_node == expected + # Also verify the alias + assert or_node == prop.IN(["a", None, "xy"]) + + @staticmethod + def test__IN_client(): + prop = model.Property("name", indexed=True) + or_node = prop._IN(["a", None, "xy"], server_op=False) + expected = query_module.DisjunctionNode( + query_module.FilterNode("name", "=", "a"), + query_module.FilterNode("name", "=", None), + query_module.FilterNode("name", "=", "xy"), + ) + assert or_node == expected + # Also verify the alias + assert or_node == prop.IN(["a", None, "xy"]) + + @staticmethod + def test__IN_server(): + prop = model.Property("name", indexed=True) + in_node = prop._IN(["a", None, "xy"], server_op=True) + assert in_node == prop.IN(["a", None, "xy"], server_op=True) + assert in_node != query_module.DisjunctionNode( + query_module.FilterNode("name", "=", "a"), + query_module.FilterNode("name", "=", None), + query_module.FilterNode("name", "=", "xy"), + ) + assert in_node == query_module.FilterNode( + "name", "in", ["a", None, "xy"], server_op=True + ) + + @staticmethod + def test__NOT_IN(): + prop = model.Property("name", indexed=True) + not_in_node = prop._NOT_IN(["a", None, "xy"]) + assert not_in_node == prop.NOT_IN(["a", None, "xy"]) + assert not_in_node == query_module.FilterNode( + "name", "not_in", ["a", None, "xy"] + ) + + @staticmethod + def test___neg__(): + prop = model.Property("name") + order = -prop + assert isinstance(order, query_module.PropertyOrder) + assert order.name == "name" + assert order.reverse is True + order = -order + assert order.reverse is False + + @staticmethod + def test___pos__(): + prop = model.Property("name") + order = +prop + assert isinstance(order, query_module.PropertyOrder) + assert order.name == "name" + assert order.reverse is False + + @staticmethod + def test__do_validate(): + validator = mock.Mock(spec=()) + value = 18 + choices = (1, 2, validator.return_value) + + prop = model.Property(name="foo", validator=validator, choices=choices) + result = prop._do_validate(value) + assert result is validator.return_value + # Check validator call. + validator.assert_called_once_with(prop, value) + + @staticmethod + def test__do_validate_base_value(): + value = model._BaseValue(b"\x00\x01") + + prop = model.Property(name="foo") + result = prop._do_validate(value) + assert result is value + # Cache is untouched. + assert model.Property._FIND_METHODS_CACHE == {} + + @staticmethod + def test__do_validate_validator_none(): + validator = mock.Mock(spec=(), return_value=None) + value = 18 + + prop = model.Property(name="foo", validator=validator) + result = prop._do_validate(value) + assert result == value + # Check validator call. 
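+ # The mock returned None, which _do_validate treats as "no change";
+ # that is why result == value above.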
+ validator.assert_called_once_with(prop, value) + + @staticmethod + def test__do_validate_not_in_choices(): + value = 18 + prop = model.Property(name="foo", choices=(1, 2)) + + with pytest.raises(exceptions.BadValueError): + prop._do_validate(value) + + @staticmethod + def test__do_validate_call_validation(): + class SimpleProperty(model.Property): + def _validate(self, value): + value.append("SimpleProperty._validate") + return value + + value = [] + prop = SimpleProperty(name="foo") + result = prop._do_validate(value) + assert result is value + assert value == ["SimpleProperty._validate"] + + @staticmethod + def test__fix_up(): + prop = model.Property(name="foo") + assert prop._code_name is None + prop._fix_up(None, "bar") + assert prop._code_name == "bar" + + @staticmethod + def test__fix_up_no_name(): + prop = model.Property() + assert prop._name is None + assert prop._code_name is None + + prop._fix_up(None, "both") + assert prop._code_name == "both" + assert prop._name == "both" + + @staticmethod + def test__store_value(): + entity = mock.Mock(_values={}, spec=("_values",)) + prop = model.Property(name="foo") + prop._store_value(entity, mock.sentinel.value) + assert entity._values == {prop._name: mock.sentinel.value} + + @staticmethod + def test__set_value(): + entity = mock.Mock( + _projection=None, _values={}, spec=("_projection", "_values") + ) + prop = model.Property(name="foo", repeated=False) + prop._set_value(entity, 19) + assert entity._values == {prop._name: 19} + + @staticmethod + def test__set_value_none(): + entity = mock.Mock( + _projection=None, _values={}, spec=("_projection", "_values") + ) + prop = model.Property(name="foo", repeated=False) + prop._set_value(entity, None) + assert entity._values == {prop._name: None} + # Cache is untouched. + assert model.Property._FIND_METHODS_CACHE == {} + + @staticmethod + def test__set_value_repeated(): + entity = mock.Mock( + _projection=None, _values={}, spec=("_projection", "_values") + ) + prop = model.Property(name="foo", repeated=True) + prop._set_value(entity, (11, 12, 13)) + assert entity._values == {prop._name: [11, 12, 13]} + + @staticmethod + def test__set_value_repeated_bad_container(): + entity = mock.Mock( + _projection=None, _values={}, spec=("_projection", "_values") + ) + prop = model.Property(name="foo", repeated=True) + with pytest.raises(exceptions.BadValueError): + prop._set_value(entity, None) + # Cache is untouched. + assert model.Property._FIND_METHODS_CACHE == {} + + @staticmethod + def test__set_value_projection(): + entity = mock.Mock(_projection=("a", "b"), spec=("_projection",)) + prop = model.Property(name="foo", repeated=True) + with pytest.raises(model.ReadonlyPropertyError): + prop._set_value(entity, None) + # Cache is untouched. 
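+ # Projection entities are read-only: the error is raised before any
+ # validation machinery runs, so the cache is never populated.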
+ assert model.Property._FIND_METHODS_CACHE == {} + + @staticmethod + def test__has_value(): + prop = model.Property(name="foo") + values = {prop._name: 88} + entity1 = mock.Mock(_values=values, spec=("_values",)) + entity2 = mock.Mock(_values={}, spec=("_values",)) + + assert prop._has_value(entity1) + assert not prop._has_value(entity2) + + @staticmethod + def test__retrieve_value(): + prop = model.Property(name="foo") + values = {prop._name: b"\x00\x01"} + entity1 = mock.Mock(_values=values, spec=("_values",)) + entity2 = mock.Mock(_values={}, spec=("_values",)) + + assert prop._retrieve_value(entity1) == b"\x00\x01" + assert prop._retrieve_value(entity2) is None + assert prop._retrieve_value(entity2, default=b"zip") == b"zip" + + @staticmethod + def test__get_user_value(): + prop = model.Property(name="prop") + value = b"\x00\x01" + values = {prop._name: value} + entity = mock.Mock(_values=values, spec=("_values",)) + assert value is prop._get_user_value(entity) + # Cache is untouched. + assert model.Property._FIND_METHODS_CACHE == {} + + @staticmethod + def test__get_user_value_wrapped(): + class SimpleProperty(model.Property): + def _from_base_type(self, value): + return value * 2.0 + + prop = SimpleProperty(name="prop") + values = {prop._name: model._BaseValue(9.5)} + entity = mock.Mock(_values=values, spec=("_values",)) + assert prop._get_user_value(entity) == 19.0 + + @staticmethod + def test__get_base_value(): + class SimpleProperty(model.Property): + def _validate(self, value): + return value + 1 + + prop = SimpleProperty(name="prop") + values = {prop._name: 20} + entity = mock.Mock(_values=values, spec=("_values",)) + assert prop._get_base_value(entity) == model._BaseValue(21) + + @staticmethod + def test__get_base_value_wrapped(): + prop = model.Property(name="prop") + value = model._BaseValue(b"\x00\x01") + values = {prop._name: value} + entity = mock.Mock(_values=values, spec=("_values",)) + assert value is prop._get_base_value(entity) + # Cache is untouched. + assert model.Property._FIND_METHODS_CACHE == {} + + @staticmethod + def test__get_base_value_unwrapped_as_list(): + class SimpleProperty(model.Property): + def _validate(self, value): + return value + 11 + + prop = SimpleProperty(name="prop", repeated=False) + values = {prop._name: 20} + entity = mock.Mock(_values=values, spec=("_values",)) + assert prop._get_base_value_unwrapped_as_list(entity) == [31] + + @staticmethod + def test__get_base_value_unwrapped_as_list_empty(): + prop = model.Property(name="prop", repeated=False) + entity = mock.Mock(_values={}, spec=("_values",)) + assert prop._get_base_value_unwrapped_as_list(entity) == [None] + # Cache is untouched. + assert model.Property._FIND_METHODS_CACHE == {} + + @staticmethod + def test__get_base_value_unwrapped_as_list_repeated(): + class SimpleProperty(model.Property): + def _validate(self, value): + return value / 10.0 + + prop = SimpleProperty(name="prop", repeated=True) + values = {prop._name: [20, 30, 40]} + entity = mock.Mock(_values=values, spec=("_values",)) + expected = [2.0, 3.0, 4.0] + assert prop._get_base_value_unwrapped_as_list(entity) == expected + + @staticmethod + def test__opt_call_from_base_type(): + prop = model.Property(name="prop") + value = b"\x00\x01" + assert value is prop._opt_call_from_base_type(value) + # Cache is untouched. 
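+ # Only _BaseValue-wrapped values trigger the _from_base_type call
+ # chain; a raw value passes straight through (contrast with the
+ # wrapped variant below).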
+ assert model.Property._FIND_METHODS_CACHE == {} + + @staticmethod + def test__opt_call_from_base_type_wrapped(): + class SimpleProperty(model.Property): + def _from_base_type(self, value): + return value * 2.0 + + prop = SimpleProperty(name="prop") + value = model._BaseValue(8.5) + assert prop._opt_call_from_base_type(value) == 17.0 + + @staticmethod + def test__value_to_repr(): + class SimpleProperty(model.Property): + def _from_base_type(self, value): + return value * 3.0 + + prop = SimpleProperty(name="prop") + value = model._BaseValue(9.25) + assert prop._value_to_repr(value) == "27.75" + + @staticmethod + def test__opt_call_to_base_type(): + class SimpleProperty(model.Property): + def _validate(self, value): + return value + 1 + + prop = SimpleProperty(name="prop") + value = 17 + result = prop._opt_call_to_base_type(value) + assert result == model._BaseValue(value + 1) + + @staticmethod + def test__opt_call_to_base_type_wrapped(): + prop = model.Property(name="prop") + value = model._BaseValue(b"\x00\x01") + assert value is prop._opt_call_to_base_type(value) + # Cache is untouched. + assert model.Property._FIND_METHODS_CACHE == {} + + @staticmethod + def test__call_from_base_type(): + class SimpleProperty(model.Property): + def _from_base_type(self, value): + value.append("SimpleProperty._from_base_type") + return value + + prop = SimpleProperty(name="prop") + value = [] + assert value is prop._call_from_base_type(value) + assert value == ["SimpleProperty._from_base_type"] + + @staticmethod + def _property_subtype_chain(): + class A(model.Property): + def _validate(self, value): + value.append("A._validate") + return value + + def _to_base_type(self, value): + value.append("A._to_base_type") + return value + + class B(A): + def _validate(self, value): + value.append("B._validate") + return value + + def _to_base_type(self, value): + value.append("B._to_base_type") + return value + + class C(B): + def _validate(self, value): + value.append("C._validate") + return value + + value = [] + + prop_a = A(name="name-a") + assert value is prop_a._validate(value) + assert value == ["A._validate"] + assert value is prop_a._to_base_type(value) + assert value == ["A._validate", "A._to_base_type"] + prop_b = B(name="name-b") + assert value is prop_b._validate(value) + assert value == ["A._validate", "A._to_base_type", "B._validate"] + assert value is prop_b._to_base_type(value) + assert value == [ + "A._validate", + "A._to_base_type", + "B._validate", + "B._to_base_type", + ] + prop_c = C(name="name-c") + assert value is prop_c._validate(value) + assert value == [ + "A._validate", + "A._to_base_type", + "B._validate", + "B._to_base_type", + "C._validate", + ] + + return A, B, C + + def test__call_to_base_type(self): + _, _, PropertySubclass = self._property_subtype_chain() + prop = PropertySubclass(name="prop") + value = [] + assert value is prop._call_to_base_type(value) + assert value == [ + "C._validate", + "B._validate", + "B._to_base_type", + "A._validate", + "A._to_base_type", + ] + + def test__call_shallow_validation(self): + _, _, PropertySubclass = self._property_subtype_chain() + prop = PropertySubclass(name="prop") + value = [] + assert value is prop._call_shallow_validation(value) + assert value == ["C._validate", "B._validate"] + + @staticmethod + def test__call_shallow_validation_no_break(): + class SimpleProperty(model.Property): + def _validate(self, value): + value.append("SimpleProperty._validate") + return value + + prop = SimpleProperty(name="simple") + value = [] + assert 
value is prop._call_shallow_validation(value) + assert value == ["SimpleProperty._validate"] + + @staticmethod + def _property_subtype(): + class SomeProperty(model.Property): + def find_me(self): + return self._name + + def IN(self): + return len(self._name) < 20 + + prop = SomeProperty(name="hi") + assert prop.find_me() == "hi" + assert prop.IN() + + return SomeProperty + + def test__find_methods(self): + SomeProperty = self._property_subtype() + # Make sure cache is empty. + assert model.Property._FIND_METHODS_CACHE == {} + + methods = SomeProperty._find_methods("IN", "find_me") + expected = [SomeProperty.IN, SomeProperty.find_me, model.Property.IN] + assert methods == expected + # Check cache + key = "{}.{}".format(SomeProperty.__module__, SomeProperty.__name__) + assert model.Property._FIND_METHODS_CACHE == {key: {("IN", "find_me"): methods}} + + def test__find_methods_reverse(self): + SomeProperty = self._property_subtype() + # Make sure cache is empty. + assert model.Property._FIND_METHODS_CACHE == {} + + methods = SomeProperty._find_methods("IN", "find_me", reverse=True) + expected = [model.Property.IN, SomeProperty.find_me, SomeProperty.IN] + assert methods == expected + # Check cache + key = "{}.{}".format(SomeProperty.__module__, SomeProperty.__name__) + assert model.Property._FIND_METHODS_CACHE == { + key: {("IN", "find_me"): list(reversed(methods))} + } + + def test__find_methods_cached(self): + SomeProperty = self._property_subtype() + # Set cache + methods = mock.sentinel.methods + key = "{}.{}".format(SomeProperty.__module__, SomeProperty.__name__) + model.Property._FIND_METHODS_CACHE = {key: {("IN", "find_me"): methods}} + assert SomeProperty._find_methods("IN", "find_me") is methods + + def test__find_methods_cached_reverse(self): + SomeProperty = self._property_subtype() + # Set cache + methods = ["a", "b"] + key = "{}.{}".format(SomeProperty.__module__, SomeProperty.__name__) + model.Property._FIND_METHODS_CACHE = {key: {("IN", "find_me"): methods}} + assert SomeProperty._find_methods("IN", "find_me", reverse=True) == [ + "b", + "a", + ] + + @staticmethod + def test__apply_list(): + method1 = mock.Mock(spec=()) + method2 = mock.Mock(spec=(), return_value=None) + method3 = mock.Mock(spec=()) + + prop = model.Property(name="benji") + to_call = prop._apply_list([method1, method2, method3]) + assert isinstance(to_call, types.FunctionType) + + value = mock.sentinel.value + result = to_call(value) + assert result is method3.return_value + + # Check mocks. + method1.assert_called_once_with(prop, value) + method2.assert_called_once_with(prop, method1.return_value) + method3.assert_called_once_with(prop, method1.return_value) + + @staticmethod + def test__apply_to_values(): + value = "foo" + prop = model.Property(name="bar", repeated=False) + entity = mock.Mock(_values={prop._name: value}, spec=("_values",)) + function = mock.Mock(spec=(), return_value="foo2") + + result = prop._apply_to_values(entity, function) + assert result == function.return_value + assert entity._values == {prop._name: result} + # Check mocks. + function.assert_called_once_with(value) + + @staticmethod + def test__apply_to_values_when_none(): + prop = model.Property(name="bar", repeated=False, default=None) + entity = mock.Mock(_values={}, spec=("_values",)) + function = mock.Mock(spec=()) + + result = prop._apply_to_values(entity, function) + assert result is None + assert entity._values == {} + # Check mocks. 
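+ # With no stored value and a None default there is nothing to
+ # transform, so the supplied function must never be invoked.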
+ function.assert_not_called() + + @staticmethod + def test__apply_to_values_transformed_none(): + value = 7.5 + prop = model.Property(name="bar", repeated=False) + entity = mock.Mock(_values={prop._name: value}, spec=("_values",)) + function = mock.Mock(spec=(), return_value=None) + + result = prop._apply_to_values(entity, function) + assert result == value + assert entity._values == {prop._name: result} + # Check mocks. + function.assert_called_once_with(value) + + @staticmethod + def test__apply_to_values_transformed_unchanged(): + value = mock.sentinel.value + prop = model.Property(name="bar", repeated=False) + entity = mock.Mock(_values={prop._name: value}, spec=("_values",)) + function = mock.Mock(spec=(), return_value=value) + + result = prop._apply_to_values(entity, function) + assert result == value + assert entity._values == {prop._name: result} + # Check mocks. + function.assert_called_once_with(value) + + @staticmethod + def test__apply_to_values_repeated(): + value = [1, 2, 3] + prop = model.Property(name="bar", repeated=True) + entity = mock.Mock(_values={prop._name: value}, spec=("_values",)) + function = mock.Mock(spec=(), return_value=42) + + result = prop._apply_to_values(entity, function) + assert result == [ + function.return_value, + function.return_value, + function.return_value, + ] + assert result is value # Check modify in-place. + assert entity._values == {prop._name: result} + # Check mocks. + assert function.call_count == 3 + calls = [mock.call(1), mock.call(2), mock.call(3)] + function.assert_has_calls(calls) + + @staticmethod + def test__apply_to_values_repeated_when_none(): + prop = model.Property(name="bar", repeated=True, default=None) + entity = mock.Mock(_values={}, spec=("_values",)) + function = mock.Mock(spec=()) + + result = prop._apply_to_values(entity, function) + assert result == [] + assert entity._values == {prop._name: result} + # Check mocks. + function.assert_not_called() + + @staticmethod + def test__get_value(): + prop = model.Property(name="prop") + value = b"\x00\x01" + values = {prop._name: value} + entity = mock.Mock( + _projection=None, _values=values, spec=("_projection", "_values") + ) + assert value is prop._get_value(entity) + # Cache is untouched. + assert model.Property._FIND_METHODS_CACHE == {} + + @staticmethod + def test__get_value_projected_present(): + prop = model.Property(name="prop") + value = 92.5 + values = {prop._name: value} + entity = mock.Mock( + _projection=(prop._name,), + _values=values, + spec=("_projection", "_values"), + ) + assert value is prop._get_value(entity) + # Cache is untouched. + assert model.Property._FIND_METHODS_CACHE == {} + + @staticmethod + def test__get_value_projected_absent(): + prop = model.Property(name="prop") + entity = mock.Mock(_projection=("nope",), spec=("_projection",)) + with pytest.raises(model.UnprojectedPropertyError): + prop._get_value(entity) + # Cache is untouched. 
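+ # Reading an unwrapped, non-projected value is a plain dictionary
+ # lookup; no conversion hooks are resolved along the way.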
+ assert model.Property._FIND_METHODS_CACHE == {} + + @staticmethod + def test__delete_value(): + prop = model.Property(name="prop") + value = b"\x00\x01" + values = {prop._name: value} + entity = mock.Mock(_values=values, spec=("_values",)) + prop._delete_value(entity) + assert values == {} + + @staticmethod + def test__delete_value_no_op(): + prop = model.Property(name="prop") + values = {} + entity = mock.Mock(_values=values, spec=("_values",)) + prop._delete_value(entity) + assert values == {} + + @staticmethod + def test__is_initialized_not_required(): + prop = model.Property(name="prop", required=False) + entity = mock.sentinel.entity + assert prop._is_initialized(entity) + # Cache is untouched. + assert model.Property._FIND_METHODS_CACHE == {} + + @staticmethod + def test__is_initialized_default_fallback(): + prop = model.Property(name="prop", required=True, default=11111) + values = {} + entity = mock.Mock( + _projection=None, _values=values, spec=("_projection", "_values") + ) + assert prop._is_initialized(entity) + # Cache is untouched. + assert model.Property._FIND_METHODS_CACHE == {} + + @staticmethod + def test__is_initialized_set_to_none(): + prop = model.Property(name="prop", required=True) + values = {prop._name: None} + entity = mock.Mock( + _projection=None, _values=values, spec=("_projection", "_values") + ) + assert not prop._is_initialized(entity) + # Cache is untouched. + assert model.Property._FIND_METHODS_CACHE == {} + + @staticmethod + def test_instance_descriptors(): + class Model(object): + prop = model.Property(name="prop", required=True) + + def __init__(self): + self._projection = None + self._values = {} + + m = Model() + value = 1234.5 + # __set__ + m.prop = value + assert m._values == {"prop": value} + # __get__ + assert m.prop == value + # __delete__ + del m.prop + assert m._values == {} + + @staticmethod + def test_class_descriptors(): + prop = model.Property(name="prop", required=True) + + class Model: + prop2 = prop + + assert Model.prop2 is prop + + @staticmethod + def test__serialize(): + prop = model.Property(name="prop") + with pytest.raises(NotImplementedError): + prop._serialize(None, None) + + @staticmethod + def test__deserialize(): + prop = model.Property(name="prop") + with pytest.raises(NotImplementedError): + prop._deserialize(None, None) + + @staticmethod + def test__prepare_for_put(): + prop = model.Property(name="prop") + assert prop._prepare_for_put(None) is None + + @staticmethod + def test__check_property(): + prop = model.Property(name="prop") + assert prop._check_property() is None + + @staticmethod + def test__check_property_not_indexed(): + prop = model.Property(name="prop", indexed=False) + with pytest.raises(model.InvalidPropertyError): + prop._check_property(require_indexed=True) + + @staticmethod + def test__check_property_with_subproperty(): + prop = model.Property(name="prop", indexed=True) + with pytest.raises(model.InvalidPropertyError): + prop._check_property(rest="a.b.c") + + @staticmethod + def test__get_for_dict(): + prop = model.Property(name="prop") + value = b"\x00\x01" + values = {prop._name: value} + entity = mock.Mock( + _projection=None, _values=values, spec=("_projection", "_values") + ) + assert value is prop._get_for_dict(entity) + # Cache is untouched. 
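+ # The projection check raises before any value lookup or validation
+ # can happen.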
+ assert model.Property._FIND_METHODS_CACHE == {} + + @staticmethod + def test__to_datastore(): + class SomeKind(model.Model): + prop = model.Property() + + entity = SomeKind(prop="foo") + data = {} + assert SomeKind.prop._to_datastore(entity, data) == ("prop",) + assert data == {"prop": "foo"} + + @staticmethod + def test__to_datastore_prop_is_repeated(): + class SomeKind(model.Model): + prop = model.Property(repeated=True) + + entity = SomeKind(prop=["foo", "bar"]) + data = {} + assert SomeKind.prop._to_datastore(entity, data) == ("prop",) + assert data == {"prop": ["foo", "bar"]} + + @staticmethod + def test__to_datastore_w_prefix(): + class SomeKind(model.Model): + prop = model.Property() + + entity = SomeKind(prop="foo") + data = {} + assert SomeKind.prop._to_datastore(entity, data, prefix="pre.") == ("pre.prop",) + assert data == {"pre.prop": "foo"} + + @staticmethod + def test__to_datastore_w_prefix_ancestor_repeated(): + class SomeKind(model.Model): + prop = model.Property() + + entity = SomeKind(prop="foo") + data = {} + assert SomeKind.prop._to_datastore( + entity, data, prefix="pre.", repeated=True + ) == ("pre.prop",) + assert data == {"pre.prop": ["foo"]} + + +class Test__validate_key: + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_valid_value(): + value = model.Key("This", 1) + result = model._validate_key(value) + assert result is value + + @staticmethod + def test_invalid_value(): + with pytest.raises(exceptions.BadValueError): + model._validate_key(None) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_unchecked_model_type(): + value = model.Key("This", 1) + entity = model.Model() + + result = model._validate_key(value, entity=entity) + assert result is value + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_unchecked_expando_type(): + value = model.Key("This", 1) + entity = model.Expando() + + result = model._validate_key(value, entity=entity) + assert result is value + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_same_kind(): + class Mine(model.Model): + pass + + value = model.Key(Mine, "yours") + entity = mock.Mock(spec=Mine) + entity._get_kind.return_value = "Mine" + + result = model._validate_key(value, entity=entity) + assert result is value + entity._get_kind.assert_called_once_with() + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_different_kind(): + class Mine(model.Model): + pass + + value = model.Key(Mine, "yours") + entity = mock.Mock(spec=Mine) + entity._get_kind.return_value = "NotMine" + + with pytest.raises(model.KindError): + model._validate_key(value, entity=entity) + + calls = [mock.call(), mock.call()] + entity._get_kind.assert_has_calls(calls) + + +class TestModelKey: + @staticmethod + def test_constructor(): + prop = model.ModelKey() + assert prop._name == "__key__" + assert prop.__dict__ == {"_name": "__key__"} + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_compare_valid(): + prop = model.ModelKey() + value = key_module.Key("say", "quay") + filter_node = prop._comparison(">=", value) + assert filter_node == query_module.FilterNode("__key__", ">=", value) + + @staticmethod + def test_compare_invalid(): + prop = model.ModelKey() + with pytest.raises(exceptions.BadValueError): + prop == None # noqa: E711 + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test__validate(): + prop = model.ModelKey() + value = key_module.Key("Up", 909) + assert prop._validate(value) is value + + @staticmethod + def 
test__validate_wrong_type(): + prop = model.ModelKey() + with pytest.raises(exceptions.BadValueError): + prop._validate(None) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test__set_value(): + entity = model.Model() + value = key_module.Key("Map", 8898) + + model.ModelKey._set_value(entity, value) + assert entity._entity_key is value + + @staticmethod + def test__set_value_none(): + entity = mock.Mock(spec=("_entity_key",)) + + assert entity._entity_key is not None + model.ModelKey._set_value(entity, None) + assert entity._entity_key is None + + @staticmethod + def test__get_value(): + entity = mock.Mock(spec=("_entity_key",)) + + result = model.ModelKey._get_value(entity) + assert result is entity._entity_key + + @staticmethod + def test__delete_value(): + entity = mock.Mock(spec=("_entity_key",)) + + assert entity._entity_key is not None + model.ModelKey._delete_value(entity) + assert entity._entity_key is None + + +class TestBooleanProperty: + @staticmethod + def test__validate(): + prop = model.BooleanProperty(name="certify") + value = True + assert prop._validate(value) is value + + @staticmethod + def test__validate_bad_value(): + prop = model.BooleanProperty(name="certify") + with pytest.raises(exceptions.BadValueError): + prop._validate(None) + + @staticmethod + def test__db_set_value(): + prop = model.BooleanProperty(name="certify") + with pytest.raises(NotImplementedError): + prop._db_set_value(None, None, None) + + @staticmethod + def test__db_get_value(): + prop = model.BooleanProperty(name="certify") + with pytest.raises(NotImplementedError): + prop._db_get_value(None, None) + + @staticmethod + def test__from_base_type_bool(): + prop = model.BooleanProperty(name="certify") + assert prop._from_base_type(True) is None + + @staticmethod + def test__from_base_type_int(): + prop = model.BooleanProperty(name="certify") + assert prop._from_base_type(1) is True + + +class TestIntegerProperty: + @staticmethod + def test__validate(): + prop = model.IntegerProperty(name="count") + value = 829038402384 + assert prop._validate(value) is value + + @staticmethod + def test__validate_bool(): + prop = model.IntegerProperty(name="count") + value = True + assert prop._validate(value) == 1 + + @staticmethod + def test__validate_bad_value(): + prop = model.IntegerProperty(name="count") + with pytest.raises(exceptions.BadValueError): + prop._validate(None) + + @staticmethod + def test__db_set_value(): + prop = model.IntegerProperty(name="count") + with pytest.raises(NotImplementedError): + prop._db_set_value(None, None, None) + + @staticmethod + def test__db_get_value(): + prop = model.IntegerProperty(name="count") + with pytest.raises(NotImplementedError): + prop._db_get_value(None, None) + + +class TestFloatProperty: + @staticmethod + def test__validate(): + prop = model.FloatProperty(name="continuous") + value = 7.25 + assert prop._validate(value) is value + + @staticmethod + def test__validate_int(): + prop = model.FloatProperty(name="continuous") + value = 1015 + assert prop._validate(value) == 1015.0 + + @staticmethod + def test__validate_bool(): + prop = model.FloatProperty(name="continuous") + value = True + assert prop._validate(value) == 1.0 + + @staticmethod + def test__validate_bad_value(): + prop = model.FloatProperty(name="continuous") + with pytest.raises(exceptions.BadValueError): + prop._validate(None) + + @staticmethod + def test__db_set_value(): + prop = model.FloatProperty(name="continuous") + with pytest.raises(NotImplementedError): + prop._db_set_value(None, 
None, None) + + @staticmethod + def test__db_get_value(): + prop = model.FloatProperty(name="continuous") + with pytest.raises(NotImplementedError): + prop._db_get_value(None, None) + + +class Test_CompressedValue: + @staticmethod + def test_constructor(): + value = b"abc" * 1000 + z_val = zlib.compress(value) + compressed_value = model._CompressedValue(z_val) + + assert compressed_value.z_val == z_val + + @staticmethod + def test___repr__(): + z_val = zlib.compress(b"12345678901234567890") + compressed_value = model._CompressedValue(z_val) + expected = "_CompressedValue(" + repr(z_val) + ")" + assert repr(compressed_value) == expected + + @staticmethod + def test___eq__(): + z_val1 = zlib.compress(b"12345678901234567890") + compressed_value1 = model._CompressedValue(z_val1) + z_val2 = zlib.compress(b"12345678901234567890abcde\x00") + compressed_value2 = model._CompressedValue(z_val2) + compressed_value3 = mock.sentinel.compressed_value + assert compressed_value1 == compressed_value1 + assert not compressed_value1 == compressed_value2 + assert not compressed_value1 == compressed_value3 + + @staticmethod + def test___ne__(): + z_val1 = zlib.compress(b"12345678901234567890") + compressed_value1 = model._CompressedValue(z_val1) + z_val2 = zlib.compress(b"12345678901234567890abcde\x00") + compressed_value2 = model._CompressedValue(z_val2) + compressed_value3 = mock.sentinel.compressed_value + compressed_value4 = model._CompressedValue(z_val1) + assert not compressed_value1 != compressed_value1 + assert compressed_value1 != compressed_value2 + assert compressed_value1 != compressed_value3 + assert not compressed_value1 != compressed_value4 + + @staticmethod + def test___hash__(): + z_val = zlib.compress(b"12345678901234567890") + compressed_value = model._CompressedValue(z_val) + with pytest.raises(TypeError): + hash(compressed_value) + + +class TestBlobProperty: + @staticmethod + def test_constructor_defaults(): + prop = model.BlobProperty() + # Check that none of the constructor defaults were used. 
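+ # Same idea as TestProperty.test_constructor_defaults: nothing was
+ # overridden, so no options were stored on the instance.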
+ assert prop.__dict__ == {} + + @staticmethod + def test_constructor_explicit(): + prop = model.BlobProperty( + name="blob_val", + compressed=True, + indexed=False, + repeated=False, + required=True, + default=b"eleven\x11", + choices=(b"a", b"b", b"c", b"eleven\x11"), + validator=TestProperty._example_validator, + verbose_name="VALUE FOR READING", + write_empty_list=False, + ) + assert prop._name == "blob_val" + assert prop._compressed + assert not prop._indexed + assert not prop._repeated + assert prop._required + assert prop._default == b"eleven\x11" + assert prop._choices == frozenset((b"a", b"b", b"c", b"eleven\x11")) + assert prop._validator is TestProperty._example_validator + assert prop._verbose_name == "VALUE FOR READING" + assert not prop._write_empty_list + + @staticmethod + def test_constructor_compressed_and_indexed(): + with pytest.raises(NotImplementedError): + model.BlobProperty(name="foo", compressed=True, indexed=True) + + @staticmethod + def test__value_to_repr(): + prop = model.BlobProperty(name="blob") + as_repr = prop._value_to_repr("abc") + assert as_repr == "'abc'" + + @staticmethod + def test__value_to_repr_truncated(): + prop = model.BlobProperty(name="blob") + value = bytes(range(256)) * 5 + as_repr = prop._value_to_repr(value) + expected = repr(value)[: model._MAX_STRING_LENGTH] + "...'" + assert as_repr == expected + + @staticmethod + def test__validate(): + prop = model.BlobProperty(name="blob") + assert prop._validate(b"abc") is None + + @staticmethod + def test__validate_wrong_type(): + prop = model.BlobProperty(name="blob") + values = (48, {"a": "c"}) + for value in values: + with pytest.raises(exceptions.BadValueError): + prop._validate(value) + + @staticmethod + def test__validate_indexed_too_long(): + prop = model.BlobProperty(name="blob", indexed=True) + value = b"\x00" * 2000 + with pytest.raises(exceptions.BadValueError): + prop._validate(value) + + @staticmethod + def test__to_base_type(): + prop = model.BlobProperty(name="blob", compressed=True) + value = b"abc" * 10 + converted = prop._to_base_type(value) + + assert isinstance(converted, model._CompressedValue) + assert converted.z_val == zlib.compress(value) + + @staticmethod + def test__to_base_type_no_convert(): + prop = model.BlobProperty(name="blob", compressed=False) + value = b"abc" * 10 + converted = prop._to_base_type(value) + assert converted is None + + @staticmethod + def test__from_base_type(): + prop = model.BlobProperty(name="blob") + original = b"abc" * 10 + z_val = zlib.compress(original) + value = model._CompressedValue(z_val) + converted = prop._from_base_type(value) + + assert converted == original + + @staticmethod + def test__from_base_type_no_compressed_value_uncompressed(): + prop = model.BlobProperty(name="blob", compressed=True) + original = b"abc" * 10 + converted = prop._from_base_type(original) + + assert converted == original + + @staticmethod + def test__from_base_type_no_compressed_value_compressed(): + prop = model.BlobProperty(name="blob", compressed=True) + original = b"abc" * 10 + z_val = zlib.compress(original) + converted = prop._from_base_type(z_val) + + assert converted == original + + @staticmethod + def test__from_base_type_no_convert(): + prop = model.BlobProperty(name="blob") + converted = prop._from_base_type(b"abc") + assert converted is None + + @staticmethod + def test__db_set_value(): + prop = model.BlobProperty(name="blob") + with pytest.raises(NotImplementedError): + prop._db_set_value(None, None, None) + + @staticmethod + def 
test__db_set_compressed_meaning(): + prop = model.BlobProperty(name="blob") + with pytest.raises(NotImplementedError): + prop._db_set_compressed_meaning(None) + + @staticmethod + def test__db_set_uncompressed_meaning(): + prop = model.BlobProperty(name="blob") + with pytest.raises(NotImplementedError): + prop._db_set_uncompressed_meaning(None) + + @staticmethod + def test__db_get_value(): + prop = model.BlobProperty(name="blob") + with pytest.raises(NotImplementedError): + prop._db_get_value(None, None) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test__to_datastore_compressed(): + class ThisKind(model.Model): + foo = model.BlobProperty(compressed=True) + + uncompressed_value = b"abc" * 1000 + compressed_value = zlib.compress(uncompressed_value) + entity = ThisKind(foo=uncompressed_value) + ds_entity = model._entity_to_ds_entity(entity) + assert "foo" in ds_entity._meanings + assert ds_entity._meanings["foo"][0] == model._MEANING_COMPRESSED + assert ds_entity._meanings["foo"][1] == compressed_value + + @staticmethod + def test__to_datastore_legacy_compressed_with_prefix(in_context): + """Regression test for #602 + + https://github.com/googleapis/python-ndb/issues/602 + """ + + class ThisKind(model.Model): + bar = model.BlobProperty(compressed=True) + + class ParentKind(model.Model): + foo = model.StructuredProperty(ThisKind) + + with in_context.new(legacy_data=True).use(): + uncompressed_value = b"abc" * 1000 + compressed_value = zlib.compress(uncompressed_value) + entity = ParentKind(foo=ThisKind(bar=uncompressed_value)) + ds_entity = model._entity_to_ds_entity(entity) + assert "foo.bar" in ds_entity._meanings + assert ds_entity._meanings["foo.bar"][0] == model._MEANING_COMPRESSED + assert ds_entity._meanings["foo.bar"][1] == compressed_value + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test__to_datastore_compressed_repeated(): + class ThisKind(model.Model): + foo = model.BlobProperty(compressed=True, repeated=True) + + uncompressed_value_one = b"abc" * 1000 + compressed_value_one = zlib.compress(uncompressed_value_one) + uncompressed_value_two = b"xyz" * 1000 + compressed_value_two = zlib.compress(uncompressed_value_two) + entity = ThisKind(foo=[uncompressed_value_one, uncompressed_value_two]) + ds_entity = model._entity_to_ds_entity(entity) + assert "foo" in ds_entity._meanings + assert ds_entity._meanings["foo"][0] == model._MEANING_COMPRESSED + assert ds_entity._meanings["foo"][1] == [ + compressed_value_one, + compressed_value_two, + ] + + @staticmethod + def test__to_datastore_legacy_compressed_repeated_in_parent(in_context): + class ThisKind(model.Model): + bar = model.BlobProperty(compressed=True, repeated=False) + + class ParentKind(model.Model): + foo = model.StructuredProperty(ThisKind, repeated=True) + + with in_context.new(legacy_data=True).use(): + uncompressed_value_one = b"abc" * 1000 + compressed_value_one = zlib.compress(uncompressed_value_one) + uncompressed_value_two = b"xyz" * 1000 + compressed_value_two = zlib.compress(uncompressed_value_two) + entity = ParentKind( + foo=[ + ThisKind(bar=uncompressed_value_one), + ThisKind(bar=uncompressed_value_two), + ] + ) + ds_entity = model._entity_to_ds_entity(entity) + assert "foo.bar" not in ds_entity._meanings + assert "foo.bar" in ds_entity.keys() + assert ds_entity.get("foo.bar") == [ + compressed_value_one, + compressed_value_two, + ] + + @staticmethod + def test__to_datastore_legacy_compressed_repeated_in_parent_uninitialized( + in_context, + ): + class ThisKind(model.Model): + bar 
= model.BlobProperty(compressed=True, repeated=False) + + class ParentKind(model.Model): + foo = model.StructuredProperty(ThisKind, repeated=True) + + with in_context.new(legacy_data=True).use(): + uncompressed_value = b"abc" * 1000 + compressed_value = zlib.compress(uncompressed_value) + entity = ParentKind(foo=[ThisKind(), ThisKind(bar=uncompressed_value)]) + ds_entity = model._entity_to_ds_entity(entity) + assert "foo.bar" not in ds_entity._meanings + assert "foo.bar" in ds_entity.keys() + assert ds_entity.get("foo.bar") == [None, compressed_value] + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test__to_datastore_compressed_uninitialized(): + class ThisKind(model.Model): + foo = model.BlobProperty(compressed=True) + + entity = ThisKind() + ds_entity = model._entity_to_ds_entity(entity) + assert "foo" not in ds_entity._meanings + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test__to_datastore_uncompressed(): + class ThisKind(model.Model): + foo = model.BlobProperty(compressed=False) + + uncompressed_value = b"abc" + entity = ThisKind(foo=uncompressed_value) + ds_entity = model._entity_to_ds_entity(entity) + assert "foo" not in ds_entity._meanings + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test__from_datastore_compressed_to_uncompressed(): + class ThisKind(model.Model): + foo = model.BlobProperty(compressed=False) + + key = datastore.Key("ThisKind", 123, project="testing") + datastore_entity = datastore.Entity(key=key) + uncompressed_value = b"abc" * 1000 + compressed_value = zlib.compress(uncompressed_value) + datastore_entity.update({"foo": compressed_value}) + meanings = {"foo": (model._MEANING_COMPRESSED, compressed_value)} + datastore_entity._meanings = meanings + protobuf = helpers.entity_to_protobuf(datastore_entity) + entity = model._entity_from_protobuf(protobuf) + assert entity.foo == uncompressed_value + ds_entity = model._entity_to_ds_entity(entity) + assert ds_entity["foo"] == uncompressed_value + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test__from_datastore_compressed_to_compressed(): + class ThisKind(model.Model): + foo = model.BlobProperty(compressed=True) + + key = datastore.Key("ThisKind", 123, project="testing") + datastore_entity = datastore.Entity(key=key) + uncompressed_value = b"abc" * 1000 + compressed_value = zlib.compress(uncompressed_value) + datastore_entity.update({"foo": compressed_value}) + meanings = {"foo": (model._MEANING_COMPRESSED, compressed_value)} + datastore_entity._meanings = meanings + protobuf = helpers.entity_to_protobuf(datastore_entity) + entity = model._entity_from_protobuf(protobuf) + ds_entity = model._entity_to_ds_entity(entity) + assert ds_entity["foo"] == compressed_value + + @pytest.mark.usefixtures("in_context") + def test__from_datastore_compressed_repeated_to_compressed(self): + class ThisKind(model.Model): + foo = model.BlobProperty(compressed=True, repeated=True) + + key = datastore.Key("ThisKind", 123, project="testing") + datastore_entity = datastore.Entity(key=key) + uncompressed_value_one = b"abc" * 1000 + compressed_value_one = zlib.compress(uncompressed_value_one) + uncompressed_value_two = b"xyz" * 1000 + compressed_value_two = zlib.compress(uncompressed_value_two) + compressed_value = [compressed_value_one, compressed_value_two] + datastore_entity.update({"foo": compressed_value}) + meanings = { + "foo": ( + model._MEANING_COMPRESSED, + compressed_value, + ) + } + datastore_entity._meanings = meanings + protobuf = 
helpers.entity_to_protobuf(datastore_entity) + entity = model._entity_from_protobuf(protobuf) + ds_entity = model._entity_to_ds_entity(entity) + assert ds_entity["foo"] == [compressed_value_one, compressed_value_two] + + @pytest.mark.skipif( + [int(v) for v in datastore.__version__.split(".")] < [2, 20, 2], + reason="uses meanings semantics from datastore v2.20.2 and later", + ) + @pytest.mark.parametrize( + "meaning", + [ + (model._MEANING_COMPRESSED, None), # set root meaning + (model._MEANING_COMPRESSED, []), + (model._MEANING_COMPRESSED, [1, 1]), + (None, [model._MEANING_COMPRESSED] * 2), # set sub-meanings + ], + ) + @pytest.mark.usefixtures("in_context") + def test__from_datastore_compressed_repeated_to_compressed_tuple_meaning( + self, meaning + ): + class ThisKind(model.Model): + foo = model.BlobProperty(compressed=True, repeated=True) + + key = datastore.Key("ThisKind", 123, project="testing") + datastore_entity = datastore.Entity(key=key) + uncompressed_value_one = b"abc" * 1000 + compressed_value_one = zlib.compress(uncompressed_value_one) + uncompressed_value_two = b"xyz" * 1000 + compressed_value_two = zlib.compress(uncompressed_value_two) + compressed_value = [compressed_value_one, compressed_value_two] + datastore_entity.update({"foo": compressed_value}) + meanings = { + "foo": ( + meaning, + compressed_value, + ) + } + datastore_entity._meanings = meanings + protobuf = helpers.entity_to_protobuf(datastore_entity) + entity = model._entity_from_protobuf(protobuf) + ds_entity = model._entity_to_ds_entity(entity) + assert ds_entity["foo"] == [compressed_value_one, compressed_value_two] + + @pytest.mark.usefixtures("in_context") + def test__from_datastore_compressed_repeated_to_uncompressed(self): + class ThisKind(model.Model): + foo = model.BlobProperty(compressed=False, repeated=True) + + key = datastore.Key("ThisKind", 123, project="testing") + datastore_entity = datastore.Entity(key=key) + uncompressed_value_one = b"abc" * 1000 + compressed_value_one = zlib.compress(uncompressed_value_one) + uncompressed_value_two = b"xyz" * 1000 + compressed_value_two = zlib.compress(uncompressed_value_two) + compressed_value = [compressed_value_one, compressed_value_two] + datastore_entity.update({"foo": compressed_value}) + meanings = { + "foo": ( + model._MEANING_COMPRESSED, + compressed_value, + ) + } + datastore_entity._meanings = meanings + protobuf = helpers.entity_to_protobuf(datastore_entity) + entity = model._entity_from_protobuf(protobuf) + ds_entity = model._entity_to_ds_entity(entity) + assert ds_entity["foo"] == [uncompressed_value_one, uncompressed_value_two] + + @pytest.mark.skipif( + [int(v) for v in datastore.__version__.split(".")] < [2, 20, 2], + reason="uses meanings semantics from datastore v2.20.2 and later", + ) + @pytest.mark.parametrize( + "meaning", + [ + (model._MEANING_COMPRESSED, None), # set root meaning + (model._MEANING_COMPRESSED, []), + (model._MEANING_COMPRESSED, [1, 1]), + (None, [model._MEANING_COMPRESSED] * 2), # set sub-meanings + ], + ) + @pytest.mark.usefixtures("in_context") + def test__from_datastore_compressed_repeated_to_uncompressed_tuple_meaning( + self, meaning + ): + class ThisKind(model.Model): + foo = model.BlobProperty(compressed=False, repeated=True) + + key = datastore.Key("ThisKind", 123, project="testing") + datastore_entity = datastore.Entity(key=key) + uncompressed_value_one = b"abc" * 1000 + compressed_value_one = zlib.compress(uncompressed_value_one) + uncompressed_value_two = b"xyz" * 1000 + compressed_value_two = 
zlib.compress(uncompressed_value_two) + compressed_value = [compressed_value_one, compressed_value_two] + datastore_entity.update({"foo": compressed_value}) + meanings = { + "foo": ( + meaning, + compressed_value, + ) + } + datastore_entity._meanings = meanings + protobuf = helpers.entity_to_protobuf(datastore_entity) + entity = model._entity_from_protobuf(protobuf) + ds_entity = model._entity_to_ds_entity(entity) + assert ds_entity["foo"] == [uncompressed_value_one, uncompressed_value_two] + + @pytest.mark.skipif( + [int(v) for v in datastore.__version__.split(".")] < [2, 20, 2], + reason="uses meanings semantics from datastore v2.20.2 and later", + ) + @pytest.mark.parametrize( + "meaning", + [ + (None, [model._MEANING_COMPRESSED, None]), + (None, [model._MEANING_COMPRESSED, None, None]), + (1, [model._MEANING_COMPRESSED, 1]), + (None, [model._MEANING_COMPRESSED]), + ], + ) + @pytest.mark.usefixtures("in_context") + def test__from_datastore_compressed_repeated_to_uncompressed_mixed_meaning( + self, meaning + ): + """ + One item is compressed, one uncompressed + """ + + class ThisKind(model.Model): + foo = model.BlobProperty(compressed=False, repeated=True) + + key = datastore.Key("ThisKind", 123, project="testing") + datastore_entity = datastore.Entity(key=key) + uncompressed_value_one = b"abc" * 1000 + compressed_value_one = zlib.compress(uncompressed_value_one) + uncompressed_value_two = b"xyz" * 1000 + compressed_value_two = zlib.compress(uncompressed_value_two) + compressed_value = [compressed_value_one, compressed_value_two] + datastore_entity.update({"foo": compressed_value}) + meanings = { + "foo": ( + meaning, + compressed_value, + ) + } + datastore_entity._meanings = meanings + protobuf = helpers.entity_to_protobuf(datastore_entity) + entity = model._entity_from_protobuf(protobuf) + ds_entity = model._entity_to_ds_entity(entity) + assert ds_entity["foo"] == [uncompressed_value_one, compressed_value_two] + + @pytest.mark.skipif( + [int(v) for v in datastore.__version__.split(".")] < [2, 20, 2], + reason="uses meanings semantics from datastore v2.20.2 and later", + ) + @pytest.mark.parametrize( + "meaning", + [ + (None, None), + (None, []), + (None, [None]), + (None, [None, None]), + (1, []), + (1, [1]), + (1, [1, 1]), + ], + ) + @pytest.mark.usefixtures("in_context") + def test__from_datastore_compressed_repeated_no_meaning(self, meaning): + """ + could be uncompressed, but meaning not set + """ + + class ThisKind(model.Model): + foo = model.BlobProperty(compressed=False, repeated=True) + + key = datastore.Key("ThisKind", 123, project="testing") + datastore_entity = datastore.Entity(key=key) + uncompressed_value_one = b"abc" * 1000 + compressed_value_one = zlib.compress(uncompressed_value_one) + uncompressed_value_two = b"xyz" * 1000 + compressed_value_two = zlib.compress(uncompressed_value_two) + compressed_value = [compressed_value_one, compressed_value_two] + datastore_entity.update({"foo": compressed_value}) + meanings = { + "foo": ( + meaning, + compressed_value, + ) + } + datastore_entity._meanings = meanings + protobuf = helpers.entity_to_protobuf(datastore_entity) + entity = model._entity_from_protobuf(protobuf) + ds_entity = model._entity_to_ds_entity(entity) + assert ds_entity["foo"] == [compressed_value_one, compressed_value_two] + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test__from_datastore_large_value_list(): + """ + try calling _from_datastore with a meaning list smaller than the value list + """ + + prop = model.BlobProperty(compressed=False, 
repeated=True, name="foo") + + key = datastore.Key("ThisKind", 123, project="testing") + datastore_entity = datastore.Entity(key=key) + uncompressed_value_one = b"abc" * 1000 + compressed_value_one = zlib.compress(uncompressed_value_one) + uncompressed_value_two = b"xyz" * 1000 + compressed_value_two = zlib.compress(uncompressed_value_two) + compressed_value = [ + model._BaseValue(compressed_value_one), + model._BaseValue(compressed_value_two), + ] + datastore_entity.update({"foo": compressed_value}) + meanings = { + "foo": ( + (None, [model._MEANING_COMPRESSED]), + compressed_value, + ) + } + + datastore_entity._meanings = meanings + + updated_value = prop._from_datastore(datastore_entity, compressed_value) + assert len(updated_value) == 2 + assert updated_value[0].b_val == uncompressed_value_one + # second value should remain compressed + assert updated_value[1].b_val == compressed_value_two + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test__from_datastore_uncompressed_to_uncompressed(): + class ThisKind(model.Model): + foo = model.BlobProperty(compressed=False) + + key = datastore.Key("ThisKind", 123, project="testing") + datastore_entity = datastore.Entity(key=key) + uncompressed_value = b"abc" * 1000 + datastore_entity.update({"foo": uncompressed_value}) + protobuf = helpers.entity_to_protobuf(datastore_entity) + entity = model._entity_from_protobuf(protobuf) + assert entity.foo == uncompressed_value + ds_entity = model._entity_to_ds_entity(entity) + assert ds_entity["foo"] == uncompressed_value + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test__from_datastore_uncompressed_to_compressed(): + class ThisKind(model.Model): + foo = model.BlobProperty(compressed=True) + + key = datastore.Key("ThisKind", 123, project="testing") + datastore_entity = datastore.Entity(key=key) + uncompressed_value = b"abc" * 1000 + compressed_value = zlib.compress(uncompressed_value) + datastore_entity.update({"foo": uncompressed_value}) + protobuf = helpers.entity_to_protobuf(datastore_entity) + entity = model._entity_from_protobuf(protobuf) + ds_entity = model._entity_to_ds_entity(entity) + assert ds_entity["foo"] == compressed_value + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test__from_datastore_uncompressed_repeated_to_compressed(): + class ThisKind(model.Model): + foo = model.BlobProperty(compressed=True, repeated=True) + + key = datastore.Key("ThisKind", 123, project="testing") + datastore_entity = datastore.Entity(key=key) + uncompressed_value_one = b"abc" * 1000 + compressed_value_one = zlib.compress(uncompressed_value_one) + uncompressed_value_two = b"xyz" * 1000 + compressed_value_two = zlib.compress(uncompressed_value_two) + datastore_entity.update( + {"foo": [uncompressed_value_one, uncompressed_value_two]} + ) + protobuf = helpers.entity_to_protobuf(datastore_entity) + entity = model._entity_from_protobuf(protobuf) + ds_entity = model._entity_to_ds_entity(entity) + assert ds_entity["foo"] == [compressed_value_one, compressed_value_two] + + +class TestCompressedTextProperty: + @staticmethod + def test_constructor_defaults(): + prop = model.CompressedTextProperty() + assert not prop._indexed + assert prop._compressed + + @staticmethod + def test_constructor_explicit(): + prop = model.CompressedTextProperty(name="text", indexed=False) + assert prop._name == "text" + assert not prop._indexed + + @staticmethod + def test_constructor_not_allowed(): + with pytest.raises(NotImplementedError): + model.CompressedTextProperty(indexed=True) + + 
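+    # Illustrative sketch, not part of the original suite: application code
+    # normally declares a compressed text field through TextProperty, which,
+    # as TestTextProperty.test_constructor_compressed below confirms, returns
+    # a CompressedTextProperty instance when compressed=True:
+    #
+    #     from google.cloud import ndb
+    #
+    #     class Article(ndb.Model):
+    #         body = ndb.TextProperty(compressed=True)  # CompressedTextProperty
+    #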
@staticmethod + def test_repr(): + prop = model.CompressedTextProperty(name="text") + expected = "CompressedTextProperty('text')" + assert repr(prop) == expected + + @staticmethod + def test__validate(): + prop = model.CompressedTextProperty(name="text") + assert prop._validate("abc") is None + + @staticmethod + def test__validate_bad_bytes(): + prop = model.CompressedTextProperty(name="text") + value = b"\x80abc" + with pytest.raises(exceptions.BadValueError): + prop._validate(value) + + @staticmethod + def test__validate_bad_type(): + prop = model.CompressedTextProperty(name="text") + with pytest.raises(exceptions.BadValueError): + prop._validate(None) + + @staticmethod + def test__to_base_type(): + prop = model.CompressedTextProperty(name="text") + assert prop._to_base_type(b"abc") is None + + @staticmethod + def test__to_base_type_converted(): + prop = model.CompressedTextProperty(name="text") + value = b"\xe2\x98\x83" + assert prop._to_base_type("\N{snowman}") == value + + @staticmethod + def test__from_base_type(): + prop = model.CompressedTextProperty(name="text") + assert prop._from_base_type("abc") is None + + @staticmethod + def test__from_base_type_converted(): + prop = model.CompressedTextProperty(name="text") + value = b"\xe2\x98\x83" + assert prop._from_base_type(value) == "\N{snowman}" + + @staticmethod + def test__from_base_type_cannot_convert(): + prop = model.CompressedTextProperty(name="text") + value = b"\x80abc" + assert prop._from_base_type(value) is None + + @staticmethod + def test__db_set_uncompressed_meaning(): + prop = model.CompressedTextProperty(name="text") + with pytest.raises(NotImplementedError): + prop._db_set_uncompressed_meaning(None) + + +class TestTextProperty: + @staticmethod + def test_constructor_defaults(): + prop = model.TextProperty() + assert not prop._indexed + + @staticmethod + def test_constructor_explicit(): + prop = model.TextProperty(name="text", indexed=False) + assert prop._name == "text" + assert not prop._indexed + + @staticmethod + def test_constructor_not_allowed(): + with pytest.raises(NotImplementedError): + model.TextProperty(indexed=True) + + @staticmethod + def test_constructor_compressed(): + prop = model.TextProperty(compressed=True) + assert isinstance(prop, model.CompressedTextProperty) + + @staticmethod + def test_repr(): + prop = model.TextProperty(name="text") + expected = "TextProperty('text')" + assert repr(prop) == expected + + @staticmethod + def test__validate(): + prop = model.TextProperty(name="text") + assert prop._validate("abc") is None + + @staticmethod + def test__validate_bad_bytes(): + prop = model.TextProperty(name="text") + value = b"\x80abc" + with pytest.raises(exceptions.BadValueError): + prop._validate(value) + + @staticmethod + def test__validate_bad_type(): + prop = model.TextProperty(name="text") + with pytest.raises(exceptions.BadValueError): + prop._validate(None) + + @staticmethod + def test__to_base_type(): + prop = model.TextProperty(name="text") + assert prop._to_base_type("abc") is None + + @staticmethod + def test__to_base_type_converted(): + prop = model.TextProperty(name="text") + value = "\N{snowman}" + assert prop._to_base_type(b"\xe2\x98\x83") == value + + @staticmethod + def test__from_base_type(): + prop = model.TextProperty(name="text") + assert prop._from_base_type("abc") is None + + @staticmethod + def test__from_base_type_converted(): + prop = model.TextProperty(name="text") + value = b"\xe2\x98\x83" + assert prop._from_base_type(value) == "\N{snowman}" + + @staticmethod + def 
test__from_base_type_cannot_convert(): + prop = model.TextProperty(name="text") + value = b"\x80abc" + assert prop._from_base_type(value) is None + + @staticmethod + def test__db_set_uncompressed_meaning(): + prop = model.TextProperty(name="text") + with pytest.raises(NotImplementedError): + prop._db_set_uncompressed_meaning(None) + + +class TestStringProperty: + @staticmethod + def test_constructor_defaults(): + prop = model.StringProperty() + assert prop._indexed + + @staticmethod + def test_constructor_explicit(): + prop = model.StringProperty(name="limited-text", indexed=True) + assert prop._name == "limited-text" + assert prop._indexed + + @staticmethod + def test_constructor_not_allowed(): + with pytest.raises(NotImplementedError): + model.StringProperty(indexed=False) + + @staticmethod + def test_repr(): + prop = model.StringProperty(name="limited-text") + expected = "StringProperty('limited-text')" + assert repr(prop) == expected + + @staticmethod + def test__validate_bad_length(): + prop = model.StringProperty(name="limited-text") + value = b"1" * 2000 + with pytest.raises(exceptions.BadValueError): + prop._validate(value) + + +class TestGeoPtProperty: + @staticmethod + def test__validate(): + prop = model.GeoPtProperty(name="cartesian") + value = model.GeoPt(0.0, 0.0) + assert prop._validate(value) is None + + @staticmethod + def test__validate_invalid(): + prop = model.GeoPtProperty(name="cartesian") + with pytest.raises(exceptions.BadValueError): + prop._validate(None) + + @staticmethod + def test__db_set_value(): + prop = model.GeoPtProperty(name="cartesian") + with pytest.raises(NotImplementedError): + prop._db_set_value(None, None, None) + + @staticmethod + def test__db_get_value(): + prop = model.GeoPtProperty(name="cartesian") + with pytest.raises(NotImplementedError): + prop._db_get_value(None, None) + + +class TestPickleProperty: + UNPICKLED = ["a", {"b": "c"}, {"d", "e"}, (0xF, 0x10), 0x11] + PICKLED = pickle.dumps(UNPICKLED, pickle.HIGHEST_PROTOCOL) + + def test__to_base_type(self): + prop = model.PickleProperty(name="pkl") + assert prop._to_base_type(self.UNPICKLED) == self.PICKLED + + def test__from_base_type(self): + prop = model.PickleProperty(name="pkl") + assert prop._from_base_type(self.PICKLED) == self.UNPICKLED + + @pytest.mark.usefixtures("in_context") + def test__legacy_from_base_type(self): + # GAE NDB stores pickled properties as bytes and with GAE NDB structures. + # Validate we can unpickle to a Cloud NDB structure. + # See https://github.com/googleapis/python-ndb/issues/587 + # TODO: This test fails as code will raise "_pickle.UnpicklingError: state is not a dictionary" + gae_ndb_stored_value = b"\x80\x02cunit.models\nA\nq\x01)\x81q\x02URj#j\x0fs~crwilcox-testr\x05\x0b\x12\x01A\x0c\xa2\x01\x08UnitTestr\x11\x1a\tsome_prop \x00*\x02\x08\x01r\x15\x1a\x06source \x00*\t\x1a\x07gae 2.7\x82\x01\x00b." + prop = model.PickleProperty(repeated=True) + val = prop._from_base_type(gae_ndb_stored_value) + expected = {"some_prop": 1, "source": "gae 2.7"} + actual = val.to_dict() + assert expected == actual + + +class TestJsonProperty: + @staticmethod + def test_constructor_defaults(): + prop = model.JsonProperty() + # Check that none of the constructor defaults were used. 
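+        # JsonProperty adds a single option, _json_type, on top of the
+        # BlobProperty options; it is likewise a class-level default, so the
+        # instance __dict__ stays empty here.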
+ assert prop.__dict__ == {} + + @staticmethod + def test_constructor_explicit(): + prop = model.JsonProperty( + name="json-val", + compressed=True, + json_type=tuple, + indexed=False, + repeated=False, + required=True, + default=(), + choices=((), ("b",), ("c", "d")), + validator=TestProperty._example_validator, + verbose_name="VALUE FOR READING", + write_empty_list=False, + ) + assert prop._name == "json-val" + assert prop._compressed + assert prop._json_type is tuple + assert not prop._indexed + assert not prop._repeated + assert prop._required + assert prop._default == () + assert prop._choices == frozenset([(), ("b",), ("c", "d")]) + assert prop._validator is TestProperty._example_validator + assert prop._verbose_name == "VALUE FOR READING" + assert not prop._write_empty_list + + @staticmethod + def test__validate_no_type(): + prop = model.JsonProperty(name="json-val") + assert prop._validate(b"any") is None + + @staticmethod + def test__validate_correct_type(): + prop = model.JsonProperty(name="json-val", json_type=list) + assert prop._validate([b"any", b"mini"]) is None + + @staticmethod + def test__validate_incorrect_type(): + prop = model.JsonProperty(name="json-val", json_type=dict) + with pytest.raises(TypeError): + prop._validate(14) + + @staticmethod + def test__to_base_type(): + prop = model.JsonProperty(name="json-val") + value = [14, [15, 16], {"seventeen": 18}, "\N{snowman}"] + expected = b'[14,[15,16],{"seventeen":18},"\\u2603"]' + assert prop._to_base_type(value) == expected + + @staticmethod + def test__from_base_type(): + prop = model.JsonProperty(name="json-val") + value = b'[14,true,{"a":null,"b":"\\u2603"}]' + expected = [14, True, {"a": None, "b": "\N{snowman}"}] + assert prop._from_base_type(value) == expected + + @staticmethod + def test__from_base_type_str(): + prop = model.JsonProperty(name="json-val") + value = '[14,true,{"a":null,"b":"\\u2603"}]' + expected = [14, True, {"a": None, "b": "\N{snowman}"}] + assert prop._from_base_type(value) == expected + + +class TestUser: + @staticmethod + def test_constructor_defaults(): + with pytest.raises(ValueError): + model.User() + + @staticmethod + def _make_default(): + return model.User(email="foo@example.com", _auth_domain="example.com") + + def test_constructor_explicit(self): + user_value = self._make_default() + assert user_value._auth_domain == "example.com" + assert user_value._email == "foo@example.com" + assert user_value._user_id is None + + @staticmethod + def test_constructor_no_email(): + with pytest.raises(model.UserNotFoundError): + model.User(_auth_domain="example.com") + with pytest.raises(model.UserNotFoundError): + model.User(email="", _auth_domain="example.com") + + def test_nickname(self): + user_value = self._make_default() + assert user_value.nickname() == "foo" + + @staticmethod + def test_nickname_mismatch_domain(): + user_value = model.User(email="foo@example.org", _auth_domain="example.com") + assert user_value.nickname() == "foo@example.org" + + def test_email(self): + user_value = self._make_default() + assert user_value.email() == "foo@example.com" + + @staticmethod + def test_user_id(): + user_value = model.User( + email="foo@example.com", _auth_domain="example.com", _user_id="123" + ) + assert user_value.user_id() == "123" + + def test_auth_domain(self): + user_value = self._make_default() + assert user_value.auth_domain() == "example.com" + + def test___str__(self): + user_value = self._make_default() + assert str(user_value) == "foo" + + def test___repr__(self): + user_value = 
self._make_default() + assert repr(user_value) == "users.User(email='foo@example.com')" + + @staticmethod + def test___repr__with_user_id(): + user_value = model.User( + email="foo@example.com", _auth_domain="example.com", _user_id="123" + ) + expected = "users.User(email='foo@example.com', _user_id='123')" + assert repr(user_value) == expected + + def test___hash__(self): + user_value = self._make_default() + expected = hash((user_value._email, user_value._auth_domain)) + assert hash(user_value) == expected + + def test___eq__(self): + user_value1 = self._make_default() + user_value2 = model.User(email="foo@example.org", _auth_domain="example.com") + user_value3 = model.User(email="foo@example.com", _auth_domain="example.org") + user_value4 = mock.sentinel.blob_key + assert user_value1 == user_value1 + assert not user_value1 == user_value2 + assert not user_value1 == user_value3 + assert not user_value1 == user_value4 + + def test___lt__(self): + user_value1 = self._make_default() + user_value2 = model.User(email="foo@example.org", _auth_domain="example.com") + user_value3 = model.User(email="foo@example.com", _auth_domain="example.org") + user_value4 = mock.sentinel.blob_key + assert not user_value1 < user_value1 + assert user_value1 < user_value2 + assert user_value1 < user_value3 + with pytest.raises(TypeError): + user_value1 < user_value4 + + @staticmethod + def test__from_ds_entity(): + assert model.User._from_ds_entity( + {"email": "foo@example.com", "auth_domain": "gmail.com"} + ) == model.User("foo@example.com", "gmail.com") + + @staticmethod + def test__from_ds_entity_with_user_id(): + assert model.User._from_ds_entity( + { + "email": "foo@example.com", + "auth_domain": "gmail.com", + "user_id": "12345", + } + ) == model.User("foo@example.com", "gmail.com", "12345") + + +class TestUserProperty: + @staticmethod + def test_constructor_defaults(): + prop = model.UserProperty() + # Check that none of the constructor defaults were used. 
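+        # UserProperty is retained for GAE NDB compatibility; the two tests
+        # that follow show that the legacy auto_current_user and
+        # auto_current_user_add options now raise NotImplementedError.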
+ assert prop.__dict__ == {} + + @staticmethod + def test_constructor_auto_current_user(): + with pytest.raises(NotImplementedError): + model.UserProperty(auto_current_user=True) + + @staticmethod + def test_constructor_auto_current_user_add(): + with pytest.raises(NotImplementedError): + model.UserProperty(auto_current_user_add=True) + + @staticmethod + def test__validate(): + prop = model.UserProperty(name="u") + user_value = model.User(email="foo@example.com", _auth_domain="example.com") + assert prop._validate(user_value) is None + + @staticmethod + def test__validate_invalid(): + prop = model.UserProperty(name="u") + with pytest.raises(exceptions.BadValueError): + prop._validate(None) + + @staticmethod + def test__prepare_for_put(): + prop = model.UserProperty(name="u") + assert prop._prepare_for_put(None) is None + + @staticmethod + def test__db_set_value(): + prop = model.UserProperty(name="u") + with pytest.raises(NotImplementedError): + prop._db_set_value(None, None, None) + + @staticmethod + def test__db_get_value(): + prop = model.UserProperty(name="u") + with pytest.raises(NotImplementedError): + prop._db_get_value(None, None) + + @staticmethod + def test__to_base_type(): + prop = model.UserProperty(name="u") + entity = prop._to_base_type( + model.User( + "email", + "auth_domain", + ) + ) + assert entity["email"] == "email" + assert "email" in entity.exclude_from_indexes + assert entity["auth_domain"] == "auth_domain" + assert "auth_domain" in entity.exclude_from_indexes + assert "user_id" not in entity + + @staticmethod + def test__to_base_type_w_user_id(): + prop = model.UserProperty(name="u") + entity = prop._to_base_type(model.User("email", "auth_domain", "user_id")) + assert entity["email"] == "email" + assert "email" in entity.exclude_from_indexes + assert entity["auth_domain"] == "auth_domain" + assert "auth_domain" in entity.exclude_from_indexes + assert entity["user_id"] == "user_id" + assert "user_id" in entity.exclude_from_indexes + + @staticmethod + def test__from_base_type(): + prop = model.UserProperty(name="u") + assert prop._from_base_type( + {"email": "email", "auth_domain": "auth_domain"} + ) == model.User("email", "auth_domain") + + @staticmethod + def test__to_datastore(): + class SomeKind(model.Model): + u = model.UserProperty() + + entity = SomeKind(u=model.User("email", "auth_domain")) + data = {} + SomeKind.u._to_datastore(entity, data) + meaning, ds_entity = data["_meanings"]["u"] + assert meaning == model._MEANING_PREDEFINED_ENTITY_USER + assert data["u"] == ds_entity + + @staticmethod + def test__to_datastore_no_value(): + class SomeKind(model.Model): + u = model.UserProperty() + + entity = SomeKind() + data = {} + SomeKind.u._to_datastore(entity, data) + assert data == {"u": None} + + +class TestKeyProperty: + @staticmethod + def test_constructor_defaults(): + prop = model.KeyProperty() + # Check that none of the constructor defaults were used. 
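+        # KeyProperty accepts its name and kind positionally in either order;
+        # the constructor tests below cover the accepted combinations as well
+        # as the TypeError cases for duplicated or conflicting arguments.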
+ assert prop.__dict__ == {} + + @staticmethod + def test_constructor_too_many_positional(): + with pytest.raises(TypeError): + model.KeyProperty("a", None, None) + + @staticmethod + def test_constructor_positional_name_twice(): + with pytest.raises(TypeError): + model.KeyProperty("a", "b") + + @staticmethod + def test_constructor_positional_kind_twice(): + class Simple(model.Model): + pass + + with pytest.raises(TypeError): + model.KeyProperty(Simple, Simple) + + @staticmethod + def test_constructor_positional_bad_type(): + with pytest.raises(TypeError): + model.KeyProperty("a", mock.sentinel.bad) + + @staticmethod + def test_constructor_name_both_ways(): + with pytest.raises(TypeError): + model.KeyProperty("a", name="b") + + @staticmethod + def test_constructor_kind_both_ways(): + class Simple(model.Model): + pass + + with pytest.raises(TypeError): + model.KeyProperty(Simple, kind="Simple") + + @staticmethod + def test_constructor_bad_kind(): + with pytest.raises(TypeError): + model.KeyProperty(kind=mock.sentinel.bad) + + @staticmethod + def test_constructor_positional(): + class Simple(model.Model): + pass + + prop = model.KeyProperty(None, None) + assert prop._name is None + assert prop._kind is None + + name_only_args = [("keyp",), (None, "keyp"), ("keyp", None)] + for args in name_only_args: + prop = model.KeyProperty(*args) + assert prop._name == "keyp" + assert prop._kind is None + + kind_only_args = [(Simple,), (None, Simple), (Simple, None)] + for args in kind_only_args: + prop = model.KeyProperty(*args) + assert prop._name is None + assert prop._kind == "Simple" + + both_args = [("keyp", Simple), (Simple, "keyp")] + for args in both_args: + prop = model.KeyProperty(*args) + assert prop._name == "keyp" + assert prop._kind == "Simple" + + @staticmethod + def test_constructor_hybrid(): + class Simple(model.Model): + pass + + # prop1 will get a TypeError due to Python 2.7 compatibility + # prop1 = model.KeyProperty(Simple, name="keyp") + prop2 = model.KeyProperty("keyp", kind=Simple) + prop3 = model.KeyProperty("keyp", kind="Simple") + for prop in (prop2, prop3): + assert prop._name == "keyp" + assert prop._kind == "Simple" + + @staticmethod + def test_repr(): + prop = model.KeyProperty("keyp", kind="Simple", repeated=True) + expected = "KeyProperty('keyp', kind='Simple', repeated=True)" + assert repr(prop) == expected + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test__validate(): + kind = "Simple" + prop = model.KeyProperty("keyp", kind=kind) + value = key_module.Key(kind, 182983) + assert prop._validate(value) is None + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test__validate_without_kind(): + prop = model.KeyProperty("keyp") + value = key_module.Key("Foo", "Bar") + assert prop._validate(value) is None + + @staticmethod + def test__validate_non_key(): + prop = model.KeyProperty("keyp") + with pytest.raises(exceptions.BadValueError): + prop._validate(None) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test__validate_partial_key(): + prop = model.KeyProperty("keyp") + value = key_module.Key("Kynd", None) + with pytest.raises(exceptions.BadValueError): + prop._validate(value) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test__validate_wrong_kind(): + prop = model.KeyProperty("keyp", kind="Simple") + value = key_module.Key("Kynd", 184939) + with pytest.raises(exceptions.BadValueError): + prop._validate(value) + + @staticmethod + def test__db_set_value(): + prop = model.KeyProperty("keyp", kind="Simple") + 
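+        # _db_set_value and _db_get_value are legacy GAE NDB serialization
+        # hooks; Cloud NDB keeps them only so that stale callers fail loudly.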
with pytest.raises(NotImplementedError): + prop._db_set_value(None, None, None) + + @staticmethod + def test__db_get_value(): + prop = model.KeyProperty("keyp", kind="Simple") + with pytest.raises(NotImplementedError): + prop._db_get_value(None, None) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test__to_base_type(): + prop = model.KeyProperty("keyp") + value = key_module.Key("Kynd", 123) + assert prop._to_base_type(value) is value._key + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test__to_base_type_wrong_type(): + prop = model.KeyProperty("keyp") + value = ("Kynd", 123) + with pytest.raises(TypeError): + assert prop._to_base_type(value) is value._key + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test__from_base_type(): + prop = model.KeyProperty("keyp") + ds_value = ds_key_module.Key("Kynd", 123, project="testing") + value = prop._from_base_type(ds_value) + assert value.kind() == "Kynd" + assert value.id() == 123 + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_equality(): + class KeyPropTestModel(model.Model): + k = model.KeyProperty() + + kptm1 = KeyPropTestModel(k=key_module.Key("k", 1)) + kptm2 = KeyPropTestModel(k=key_module.Key("k", 1, database="")) + assert kptm1 == kptm2 + + +class TestBlobKeyProperty: + @staticmethod + def test__validate(): + prop = model.BlobKeyProperty(name="object-gcs") + value = model.BlobKey(b"abc") + assert prop._validate(value) is None + + @staticmethod + def test__validate_invalid(): + prop = model.BlobKeyProperty(name="object-gcs") + with pytest.raises(exceptions.BadValueError): + prop._validate(None) + + @staticmethod + def test__db_set_value(): + prop = model.BlobKeyProperty(name="object-gcs") + with pytest.raises(NotImplementedError): + prop._db_set_value(None, None, None) + + @staticmethod + def test__db_get_value(): + prop = model.BlobKeyProperty(name="object-gcs") + with pytest.raises(NotImplementedError): + prop._db_get_value(None, None) + + +class TestDateTimeProperty: + @staticmethod + def _string_validator(prop, value): + return datetime.datetime.strptime(value, "%Y-%m-%d %H:%M:%S") + + @staticmethod + def test_constructor_defaults(): + prop = model.DateTimeProperty() + # Check that none of the constructor defaults were used. 
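+        # As with the other property types, _auto_now, _auto_now_add and
+        # _tzinfo are class-level defaults, so nothing lands in the instance
+        # __dict__ until a constructor argument overrides one.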
+ assert prop.__dict__ == {} + + @staticmethod + def test_constructor_explicit(): + now = datetime.datetime.utcnow() + prop = model.DateTimeProperty( + name="dt_val", + auto_now=True, + auto_now_add=False, + tzinfo=timezone(-4), + indexed=False, + repeated=False, + required=True, + default=now, + validator=TestDateTimeProperty._string_validator, + verbose_name="VALUE FOR READING", + write_empty_list=False, + ) + assert prop._name == "dt_val" + assert prop._auto_now + assert not prop._auto_now_add + assert prop._tzinfo == timezone(-4) + assert not prop._indexed + assert not prop._repeated + assert prop._required + assert prop._default == now + assert prop._choices is None + assert prop._validator is TestDateTimeProperty._string_validator + assert prop._verbose_name == "VALUE FOR READING" + assert not prop._write_empty_list + + @staticmethod + def test_constructor_repeated(): + with pytest.raises(ValueError): + model.DateTimeProperty(name="dt_val", auto_now=True, repeated=True) + with pytest.raises(ValueError): + model.DateTimeProperty(name="dt_val", auto_now_add=True, repeated=True) + + prop = model.DateTimeProperty(name="dt_val", repeated=True) + assert prop._repeated + + @staticmethod + def test__validate(): + prop = model.DateTimeProperty(name="dt_val") + value = datetime.datetime.utcnow() + assert prop._validate(value) is None + + @staticmethod + def test__do_validate_with_validator(): + prop = model.DateTimeProperty( + name="dt_val", validator=TestDateTimeProperty._string_validator + ) + value = "2020-08-08 12:53:54" + # validator must be called first to convert to datetime + assert prop._do_validate(value) == datetime.datetime(2020, 8, 8, 12, 53, 54) + + @staticmethod + def test__validate_invalid(): + prop = model.DateTimeProperty(name="dt_val") + with pytest.raises(exceptions.BadValueError): + prop._validate(None) + + @staticmethod + def test__validate_with_tz(): + prop = model.DateTimeProperty(name="dt_val") + value = datetime.datetime.now(tz=pytz.utc) + with pytest.raises(exceptions.BadValueError): + prop._validate(value) + + @staticmethod + def test__now(): + dt_val = model.DateTimeProperty._now() + assert isinstance(dt_val, datetime.datetime) + + @staticmethod + def test__prepare_for_put(): + prop = model.DateTimeProperty(name="dt_val") + entity = mock.Mock(_values={}, spec=("_values",)) + + with mock.patch.object(prop, "_now") as _now: + prop._prepare_for_put(entity) + assert entity._values == {} + _now.assert_not_called() + + @staticmethod + def test__prepare_for_put_auto_now(): + prop = model.DateTimeProperty(name="dt_val", auto_now=True) + values1 = {} + values2 = {prop._name: mock.sentinel.dt} + for values in (values1, values2): + entity = mock.Mock(_values=values, spec=("_values",)) + + with mock.patch.object(prop, "_now") as _now: + prop._prepare_for_put(entity) + assert entity._values == {prop._name: _now.return_value} + _now.assert_called_once_with() + + @staticmethod + def test__prepare_for_put_auto_now_add(): + prop = model.DateTimeProperty(name="dt_val", auto_now_add=True) + values1 = {} + values2 = {prop._name: mock.sentinel.dt} + for values in (values1, values2): + entity = mock.Mock(_values=values.copy(), spec=("_values",)) + + with mock.patch.object(prop, "_now") as _now: + prop._prepare_for_put(entity) + if values: + assert entity._values == values + _now.assert_not_called() + else: + assert entity._values != values + assert entity._values == {prop._name: _now.return_value} + _now.assert_called_once_with() + + @staticmethod + def test__db_set_value(): + prop = 
model.DateTimeProperty(name="dt_val") + with pytest.raises(NotImplementedError): + prop._db_set_value(None, None, None) + + @staticmethod + def test__db_get_value(): + prop = model.DateTimeProperty(name="dt_val") + with pytest.raises(NotImplementedError): + prop._db_get_value(None, None) + + @staticmethod + def test__from_base_type_no_timezone(): + prop = model.DateTimeProperty(name="dt_val") + value = datetime.datetime.now() + assert prop._from_base_type(value) is None + + @staticmethod + def test__from_base_type_timezone(): + prop = model.DateTimeProperty(name="dt_val") + value = datetime.datetime(2010, 5, 12, tzinfo=pytz.utc) + assert prop._from_base_type(value) == datetime.datetime(2010, 5, 12) + + @staticmethod + def test__from_base_type_convert_timezone(): + prop = model.DateTimeProperty(name="dt_val", tzinfo=timezone(-4)) + value = datetime.datetime(2010, 5, 12, tzinfo=pytz.utc) + assert prop._from_base_type(value) == datetime.datetime( + 2010, 5, 11, 20, tzinfo=timezone(-4) + ) + + @staticmethod + def test__from_base_type_naive_with_timezone(): + prop = model.DateTimeProperty(name="dt_val", tzinfo=timezone(-4)) + value = datetime.datetime(2010, 5, 12) + assert prop._from_base_type(value) == datetime.datetime( + 2010, 5, 11, 20, tzinfo=timezone(-4) + ) + + @staticmethod + def test__from_base_type_int(): + prop = model.DateTimeProperty(name="dt_val") + value = 1273632120000000 + assert prop._from_base_type(value) == datetime.datetime(2010, 5, 12, 2, 42) + + @staticmethod + def test__to_base_type_noop(): + prop = model.DateTimeProperty(name="dt_val", tzinfo=timezone(-4)) + value = datetime.datetime(2010, 5, 12) + assert prop._to_base_type(value) is None + + @staticmethod + def test__to_base_type_convert_to_utc(): + prop = model.DateTimeProperty(name="dt_val", tzinfo=timezone(-4)) + value = datetime.datetime(2010, 5, 12, tzinfo=timezone(-4)) + assert prop._to_base_type(value) == datetime.datetime( + 2010, 5, 12, 4, tzinfo=pytz.utc + ) + + +class TestDateProperty: + @staticmethod + def test__validate(): + prop = model.DateProperty(name="d_val") + value = datetime.datetime.utcnow().date() + assert prop._validate(value) is None + + @staticmethod + def test__validate_invalid(): + prop = model.DateProperty(name="d_val") + with pytest.raises(exceptions.BadValueError): + prop._validate(None) + + @staticmethod + def test__now(): + d_val = model.DateProperty._now() + assert isinstance(d_val, datetime.date) + + def test__to_base_type(self): + prop = model.DateProperty(name="d_val") + value = datetime.date(2014, 10, 7) + expected = datetime.datetime(2014, 10, 7) + assert prop._to_base_type(value) == expected + + def test__to_base_type_invalid(self): + prop = model.DateProperty(name="d_val") + with pytest.raises(TypeError): + prop._to_base_type(None) + + def test__from_base_type(self): + prop = model.DateProperty(name="d_val") + value = datetime.datetime(2014, 10, 7) + expected = datetime.date(2014, 10, 7) + assert prop._from_base_type(value) == expected + + +class TestTimeProperty: + @staticmethod + def test__validate(): + prop = model.TimeProperty(name="t_val") + value = datetime.datetime.utcnow().time() + assert prop._validate(value) is None + + @staticmethod + def test__validate_invalid(): + prop = model.TimeProperty(name="t_val") + with pytest.raises(exceptions.BadValueError): + prop._validate(None) + + @staticmethod + def test__now(): + t_val = model.TimeProperty._now() + assert isinstance(t_val, datetime.time) + + def test__to_base_type(self): + prop = model.TimeProperty(name="t_val") + 
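+        # TimeProperty converts to its datetime base type by anchoring the
+        # time to the epoch date 1970-01-01, as the expected value below shows.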
value = datetime.time(17, 57, 18, 453529) + expected = datetime.datetime(1970, 1, 1, 17, 57, 18, 453529) + assert prop._to_base_type(value) == expected + + def test__to_base_type_invalid(self): + prop = model.TimeProperty(name="t_val") + with pytest.raises(TypeError): + prop._to_base_type(None) + + def test__from_base_type(self): + prop = model.TimeProperty(name="t_val") + value = datetime.datetime(1970, 1, 1, 1, 15, 59, 900101) + expected = datetime.time(1, 15, 59, 900101) + assert prop._from_base_type(value) == expected + + +class TestStructuredProperty: + @staticmethod + def test_constructor(): + class Mine(model.Model): + foo = model.StringProperty() + + prop = model.StructuredProperty(Mine) + assert prop._model_class == Mine + + @staticmethod + def test_constructor_with_repeated(): + class Mine(model.Model): + foo = model.StringProperty() + + prop = model.StructuredProperty(Mine, repeated=True) + assert prop._model_class == Mine + + @staticmethod + def test_constructor_with_repeated_prop(): + class Mine(model.Model): + foo = model.StringProperty(repeated=True) + + with pytest.raises(TypeError): + model.StructuredProperty(Mine, repeated=True) + + @staticmethod + def test__validate(): + class Mine(model.Model): + foo = model.StringProperty() + + prop = model.StructuredProperty(Mine) + instance = Mine() + assert prop._validate(instance) is None + + @staticmethod + def test__validate_with_dict(): + class Mine(model.Model): + foo = model.StringProperty() + + prop = model.StructuredProperty(Mine) + assert isinstance(prop._validate({}), Mine) + + @staticmethod + def test__validate_invalid(): + class Mine(model.Model): + foo = model.StringProperty() + + prop = model.StructuredProperty(Mine) + with pytest.raises(exceptions.BadValueError): + prop._validate(None) + + @staticmethod + def test__get_value(): + class Mine(model.Model): + foo = model.StringProperty() + + class MineToo(model.Model): + bar = model.StructuredProperty(Mine) + + mine = Mine() + minetoo = MineToo() + minetoo.bar = mine + assert MineToo.bar._get_value(minetoo) == mine + + @staticmethod + def test__get_value_unprojected(): + class Mine(model.Model): + foo = model.StringProperty() + + class MineToo(model.Model): + bar = model.StructuredProperty(Mine) + + minetoo = MineToo(projection=("saywhat",)) + with pytest.raises(model.UnprojectedPropertyError): + MineToo.bar._get_value(minetoo) + + @staticmethod + def test__get_for_dict(): + class Mine(model.Model): + foo = model.StringProperty() + + class MineToo(model.Model): + bar = model.StructuredProperty(Mine) + + mine = Mine(foo="Foo") + minetoo = MineToo() + minetoo.bar = mine + assert MineToo.bar._get_for_dict(minetoo) == {"foo": "Foo"} + + @staticmethod + def test__get_for_dict_repeated(): + class Mine(model.Model): + foo = model.StringProperty() + + class MineToo(model.Model): + bar = model.StructuredProperty(Mine, repeated=True) + + mine = Mine(foo="Foo") + minetoo = MineToo() + minetoo.bar = [mine, mine] + assert MineToo.bar._get_for_dict(minetoo) == [ + {"foo": "Foo"}, + {"foo": "Foo"}, + ] + + @staticmethod + def test__get_for_dict_no_value(): + class Mine(model.Model): + foo = model.StringProperty() + + class MineToo(model.Model): + bar = model.StructuredProperty(Mine) + + minetoo = MineToo() + minetoo.bar = None + assert MineToo.bar._get_for_dict(minetoo) is None + + @staticmethod + def test___getattr__(): + class Mine(model.Model): + foo = model.StringProperty() + + prop = model.StructuredProperty(Mine) + prop._name = "bar" + assert isinstance(prop.foo, 
model.StringProperty) + assert prop.foo._name == "bar.foo" + + @staticmethod + def test___getattr__use_codename(): + class Mine(model.Model): + foo = model.StringProperty("notfoo") + + prop = model.StructuredProperty(Mine) + prop._name = "bar" + assert isinstance(prop.foo, model.StringProperty) + assert prop.foo._name == "bar.notfoo" + + @staticmethod + def test___getattr___bad_prop(): + class Mine(model.Model): + foo = model.StringProperty() + + prop = model.StructuredProperty(Mine) + with pytest.raises(AttributeError): + prop.baz + + @staticmethod + def test__comparison_eq(): + class Mine(model.Model): + foo = model.StringProperty() + + prop = model.StructuredProperty(Mine) + prop._name = "bar" + mine = Mine(foo="baz") + assert prop._comparison("=", mine) == query_module.FilterNode( + "bar.foo", "=", "baz" + ) + + @staticmethod + def test__comparison_other(): + class Mine(model.Model): + foo = model.StringProperty() + + prop = model.StructuredProperty(Mine) + mine = Mine(foo="baz") + with pytest.raises(exceptions.BadFilterError): + prop._comparison(">", mine) + + @staticmethod + def test__comparison_not_indexed(): + class Mine(model.Model): + foo = model.StringProperty() + + prop = model.StructuredProperty(Mine, indexed=False) + mine = Mine(foo="baz") + with pytest.raises(exceptions.BadFilterError): + prop._comparison("=", mine) + + @staticmethod + def test__comparison_value_none(): + class Mine(model.Model): + foo = model.StringProperty() + + prop = model.StructuredProperty(Mine) + prop._name = "bar" + assert prop._comparison("=", None) == query_module.FilterNode("bar", "=", None) + + @staticmethod + def test__comparison_repeated(): + class Mine(model.Model): + foo = model.StringProperty(repeated=True) + bar = model.StringProperty() + + prop = model.StructuredProperty(Mine) + prop._name = "baz" + mine = Mine(bar="x") + assert prop._comparison("=", mine) == query_module.FilterNode( + "baz.bar", "=", "x" + ) + + @staticmethod + def test__comparison_repeated_no_filters(): + class Mine(model.Model): + foo = model.StringProperty(repeated=True) + + prop = model.StructuredProperty(Mine) + prop._name = "bar" + mine = Mine(foo=[]) + with pytest.raises(exceptions.BadFilterError): + prop._comparison("=", mine) + + @staticmethod + def test__comparison_repeated_non_empty(): + class Mine(model.Model): + foo = model.StringProperty(repeated=True) + + prop = model.StructuredProperty(Mine) + prop._name = "bar" + mine = Mine(foo=["baz"]) + with pytest.raises(exceptions.BadFilterError): + prop._comparison("=", mine) + + @staticmethod + def test__comparison_repeated_empty(): + class Mine(model.Model): + foo = model.StringProperty(repeated=True) + + prop = model.StructuredProperty(Mine) + prop._name = "bar" + mine = Mine(foo=[]) + with pytest.raises(exceptions.BadFilterError): + prop._comparison("=", mine) + + @staticmethod + def test__comparison_multiple(): + class Mine(model.Model): + foo = model.StringProperty() + bar = model.StringProperty() + + prop = model.StructuredProperty(Mine) + prop._name = "baz" + mine = Mine(foo="x", bar="y") + comparison = prop._comparison("=", mine) + compared = query_module.AND( + query_module.FilterNode("baz.bar", "=", "y"), + query_module.FilterNode("baz.foo", "=", "x"), + ) + # Sort them and test each one is in both lists. 
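+        # AND() makes no ordering guarantee for its nodes, so both lists are
+        # sorted by property name before the pairwise comparison.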
+ assert all( # pragma: NO BRANCH + [ + a == b + for a, b in zip( + sorted(comparison._nodes, key=lambda a: a._name), + sorted(compared._nodes, key=lambda a: a._name), + ) + ] + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test__comparison_repeated_structured(): + class Mine(model.Model): + foo = model.StringProperty() + bar = model.StringProperty() + + prop = model.StructuredProperty(Mine, repeated=True) + prop._name = "bar" + mine = Mine(foo="x", bar="y") + conjunction = prop._comparison("=", mine) + # Sort them before making any comparisons. + conjunction_nodes = sorted( + conjunction._nodes, key=lambda a: getattr(a, "_name", "z") + ) + assert conjunction_nodes[0] == query_module.FilterNode("bar.bar", "=", "y") + assert conjunction_nodes[1] == query_module.FilterNode("bar.foo", "=", "x") + assert conjunction_nodes[2].predicate.name == "bar" + assert sorted(conjunction_nodes[2].predicate.match_keys) == [ + "bar", + "foo", + ] + match_values = sorted( + conjunction_nodes[2].predicate.match_values, + key=lambda a: a.string_value, + ) + assert match_values[0].string_value == "x" + assert match_values[1].string_value == "y" + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_IN(): + class Mine(model.Model): + foo = model.StringProperty() + + prop = model.StructuredProperty(Mine) + prop._name = "baz" + mine = Mine(foo="x") + minetoo = Mine(foo="y") + assert prop.IN([mine, minetoo]) == query_module.OR( + query_module.FilterNode("baz.foo", "=", "x"), + query_module.FilterNode("baz.foo", "=", "y"), + ) + + @staticmethod + def test_IN_no_value(): + class Mine(model.Model): + foo = model.StringProperty() + + prop = model.StructuredProperty(Mine) + prop._name = "baz" + assert prop.IN([]) == query_module.FalseNode() + + @staticmethod + def test_IN_bad_value(): + class Mine(model.Model): + foo = model.StringProperty() + + prop = model.StructuredProperty(Mine) + prop._name = "baz" + with pytest.raises(exceptions.BadArgumentError): + prop.IN(None) + + @staticmethod + def test__has_value(): + class Mine(model.Model): + foo = model.StringProperty() + + class MineToo(model.Model): + bar = model.StructuredProperty(Mine) + + mine = Mine(foo="Foo") + minetoo = MineToo(bar=mine) + assert MineToo.bar._has_value(minetoo) is True + + @staticmethod + def test__has_value_with_rest(): + class Mine(model.Model): + foo = model.StringProperty() + + class MineToo(model.Model): + bar = model.StructuredProperty(Mine) + + mine = Mine(foo="Foo") + minetoo = MineToo(bar=mine) + assert MineToo.bar._has_value(minetoo, rest=["foo"]) is True + + @staticmethod + def test__has_value_with_rest_subent_none(): + class Mine(model.Model): + foo = model.StringProperty() + + class MineToo(model.Model): + bar = model.StructuredProperty(Mine) + + minetoo = MineToo(bar=None) + assert MineToo.bar._has_value(minetoo, rest=["foo"]) is True + + @staticmethod + def test__has_value_with_rest_repeated_one(): + class Mine(model.Model): + foo = model.StringProperty() + + class MineToo(model.Model): + bar = model.StructuredProperty(Mine, repeated=True) + + mine = Mine(foo="x") + minetoo = MineToo(bar=[mine]) + assert MineToo.bar._has_value(minetoo, rest=["foo"]) is True + + @staticmethod + def test__has_value_with_rest_repeated_two(): + class Mine(model.Model): + foo = model.StringProperty() + + class MineToo(model.Model): + bar = model.StructuredProperty(Mine, repeated=True) + + mine = Mine(foo="x") + mine2 = Mine(foo="y") + minetoo = MineToo(bar=[mine, mine2]) + with pytest.raises(RuntimeError): + 
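+            # With two repeated subentities the sub-value lookup is
+            # ambiguous, which is why a RuntimeError is expected.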
MineToo.bar._has_value(minetoo, rest=["foo"]) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test__has_value_with_rest_subprop_none(): + class Mine(model.Model): + foo = model.StringProperty() + + class MineToo(model.Model): + bar = model.StructuredProperty(Mine) + + mine = Mine(foo="Foo") + minetoo = MineToo(bar=mine) + assert MineToo.bar._has_value(minetoo, rest=[None]) is False + + @staticmethod + def test__check_property(): + class Mine(model.Model): + foo = model.StringProperty() + + class MineToo(model.Model): + bar = model.StructuredProperty(Mine) + + assert MineToo.bar._check_property("foo") is None + + @staticmethod + def test__check_property_with_sub(): + class Mine(model.Model): + foo = model.StringProperty() + + class MineToo(model.Model): + bar = model.StructuredProperty(Mine) + + class MineThree(model.Model): + baz = model.StructuredProperty(MineToo) + + assert MineThree.baz._check_property("bar.foo") is None + + @staticmethod + def test__check_property_invalid(): + class Mine(model.Model): + foo = model.StringProperty() + + class MineToo(model.Model): + bar = model.StructuredProperty(Mine) + + with pytest.raises(model.InvalidPropertyError): + MineToo.bar._check_property("baz") + + @staticmethod + def test__check_property_no_rest(): + class Mine(model.Model): + foo = model.StringProperty() + + class MineToo(model.Model): + bar = model.StructuredProperty(Mine) + + with pytest.raises(model.InvalidPropertyError): + MineToo.bar._check_property() + + @staticmethod + def test__get_value_size(): + class Mine(model.Model): + foo = model.StringProperty() + + class MineToo(model.Model): + bar = model.StructuredProperty(Mine) + + mine = Mine(foo="Foo") + minetoo = MineToo(bar=mine) + assert MineToo.bar._get_value_size(minetoo) == 1 + + @staticmethod + def test__get_value_size_list(): + class Mine(model.Model): + foo = model.StringProperty() + + class MineToo(model.Model): + bar = model.StructuredProperty(Mine, repeated=True) + + mine = Mine(foo="Foo") + minetoo = MineToo(bar=[mine]) + assert MineToo.bar._get_value_size(minetoo) == 1 + + @staticmethod + def test__get_value_size_none(): + class Mine(model.Model): + foo = model.StringProperty() + + class MineToo(model.Model): + bar = model.StructuredProperty(Mine) + + minetoo = MineToo(bar=None) + assert MineToo.bar._get_value_size(minetoo) == 0 + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test__to_base_type(): + class Mine(model.Model): + foo = model.StringProperty() + + class MineToo(model.Model): + bar = model.StructuredProperty(Mine) + + minetoo = MineToo(bar=Mine(foo="bar")) + ds_bar = MineToo.bar._to_base_type(minetoo.bar) + assert isinstance(ds_bar, entity_module.Entity) + assert ds_bar["foo"] == "bar" + assert ds_bar.key is None + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test__to_base_type_bad_value(): + class Mine(model.Model): + foo = model.StringProperty() + + class MineToo(model.Model): + bar = model.StructuredProperty(Mine) + + with pytest.raises(TypeError): + MineToo.bar._to_base_type("badvalue") + + def test__from_base_type(self): + class Simple(model.Model): + pass + + prop = model.StructuredProperty(Simple, name="ent") + entity = entity_module.Entity() + expected = Simple() + assert prop._from_base_type(entity) == expected + + def test__from_base_type_noop(self): + class Simple(model.Model): + pass + + prop = model.StructuredProperty(Simple, name="ent") + value = object() + assert prop._from_base_type(value) is value + + @staticmethod + 
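# NOTE: in the test above, two repeated sub-entities make the
+ # rest=["foo"] lookup ambiguous, so _has_value raises RuntimeError
+ # rather than guessing which sub-entity to inspect.
+ 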
@pytest.mark.usefixtures("in_context") + def test__to_datastore_non_legacy(): + class SubKind(model.Model): + bar = model.Property() + + class SomeKind(model.Model): + foo = model.StructuredProperty(SubKind) + + entity = SomeKind(foo=SubKind(bar="baz")) + data = {} + assert SomeKind.foo._to_datastore(entity, data) == ("foo",) + assert len(data) == 1 + assert dict(data["foo"]) == {"bar": "baz"} + + @staticmethod + def test__to_datastore_legacy(in_context): + class SubKind(model.Model): + bar = model.Property() + + class SomeKind(model.Model): + foo = model.StructuredProperty(SubKind) + + with in_context.new(legacy_data=True).use(): + entity = SomeKind(foo=SubKind(bar="baz")) + data = {} + assert SomeKind.foo._to_datastore(entity, data) == {"foo.bar"} + assert data == {"foo.bar": "baz"} + + @staticmethod + def test__to_datastore_legacy_subentity_is_None(in_context): + class SubKind(model.Model): + bar = model.Property() + + class SomeKind(model.Model): + foo = model.StructuredProperty(SubKind) + + with in_context.new(legacy_data=True).use(): + entity = SomeKind() + data = {} + assert SomeKind.foo._to_datastore(entity, data) == {"foo"} + assert data == {"foo": None} + + @staticmethod + def test__to_datastore_legacy_subentity_is_unindexed(in_context): + class SubKind(model.Model): + bar = model.BlobProperty(indexed=False) + + class SomeKind(model.Model): + foo = model.StructuredProperty(SubKind) + + with in_context.new(legacy_data=True).use(): + entity = SomeKind(foo=SubKind()) + data = {"_exclude_from_indexes": []} + assert SomeKind.foo._to_datastore(entity, data) == {"foo.bar"} + assert data.pop("_exclude_from_indexes") == ["foo.bar"] + assert data == {"foo.bar": None} + + @staticmethod + def test__to_datastore_legacy_repeated(in_context): + class SubKind(model.Model): + bar = model.Property() + + class SomeKind(model.Model): + foo = model.StructuredProperty(SubKind, repeated=True) + + with in_context.new(legacy_data=True).use(): + entity = SomeKind(foo=[SubKind(bar="baz"), SubKind(bar="boz")]) + data = {} + assert SomeKind.foo._to_datastore(entity, data) == {"foo.bar"} + assert data == {"foo.bar": ["baz", "boz"]} + + @staticmethod + def test__to_datastore_legacy_repeated_empty_value(in_context): + class SubKind(model.Model): + bar = model.Property() + + class SomeKind(model.Model): + foo = model.StructuredProperty(SubKind, repeated=True) + + with in_context.new(legacy_data=True).use(): + entity = SomeKind(foo=[]) + data = {} + assert SomeKind.foo._to_datastore(entity, data) == set() + assert data == {} + + @staticmethod + def test__prepare_for_put(): + class SubKind(model.Model): + bar = model.Property() + + class SomeKind(model.Model): + foo = model.StructuredProperty(SubKind) + + entity = SomeKind(foo=SubKind()) + entity.foo._prepare_for_put = mock.Mock() + SomeKind.foo._prepare_for_put(entity) + entity.foo._prepare_for_put.assert_called_once_with() + + @staticmethod + def test__prepare_for_put_repeated(): + class SubKind(model.Model): + bar = model.Property() + + class SomeKind(model.Model): + foo = model.StructuredProperty(SubKind, repeated=True) + + entity = SomeKind(foo=[SubKind(), SubKind()]) + entity.foo[0]._prepare_for_put = mock.Mock() + entity.foo[1]._prepare_for_put = mock.Mock() + SomeKind.foo._prepare_for_put(entity) + entity.foo[0]._prepare_for_put.assert_called_once_with() + entity.foo[1]._prepare_for_put.assert_called_once_with() + + @staticmethod + def test__prepare_for_put_repeated_None(): + class SubKind(model.Model): + bar = model.Property() + + class 
SomeKind(model.Model): + foo = model.StructuredProperty(SubKind) + + entity = SomeKind() + SomeKind.foo._prepare_for_put(entity) # noop + + +class TestLocalStructuredProperty: + @staticmethod + def test_constructor_indexed(): + class Simple(model.Model): + pass + + with pytest.raises(NotImplementedError): + model.LocalStructuredProperty(Simple, name="ent", indexed=True) + + @staticmethod + def test__validate(): + class Simple(model.Model): + pass + + prop = model.LocalStructuredProperty(Simple, name="ent") + value = Simple() + assert prop._validate(value) is None + + @staticmethod + def test__validate_invalid(): + class Simple(model.Model): + pass + + class NotSimple(model.Model): + pass + + prop = model.LocalStructuredProperty(Simple, name="ent") + with pytest.raises(exceptions.BadValueError): + prop._validate(NotSimple()) + + @staticmethod + def test__validate_dict(): + class Simple(model.Model): + pass + + prop = model.LocalStructuredProperty(Simple, name="ent") + value = {} + assert isinstance(prop._validate(value), Simple) + + @staticmethod + def test__validate_dict_invalid(): + class Simple(model.Model): + pass + + prop = model.LocalStructuredProperty(Simple, name="ent") + with pytest.raises(exceptions.BadValueError): + prop._validate({"key": "value"}) + + @pytest.mark.usefixtures("in_context") + def test__to_base_type(self): + class Simple(model.Model): + pass + + prop = model.LocalStructuredProperty(Simple, name="ent") + value = Simple() + entity = entity_module.Entity() + pb = helpers.entity_to_protobuf(entity)._pb + expected = pb.SerializePartialToString() + assert prop._to_base_type(value) == expected + + @pytest.mark.usefixtures("in_context") + def test__to_base_type_invalid(self): + class Simple(model.Model): + pass + + class NotSimple(model.Model): + pass + + prop = model.LocalStructuredProperty(Simple, name="ent") + with pytest.raises(TypeError): + prop._to_base_type(NotSimple()) + + def test__from_base_type(self): + class Simple(model.Model): + pass + + prop = model.LocalStructuredProperty(Simple, name="ent") + entity = entity_module.Entity() + expected = Simple() + assert prop._from_base_type(entity) == expected + + def test__from_base_type_bytes(self): + class Simple(model.Model): + pass + + prop = model.LocalStructuredProperty(Simple, name="ent") + pb = helpers.entity_to_protobuf(entity_module.Entity())._pb + value = pb.SerializePartialToString() + expected = Simple() + assert prop._from_base_type(value) == expected + + def test__from_base_type_keep_keys(self): + class Simple(model.Model): + pass + + prop = model.LocalStructuredProperty(Simple, name="ent") + entity = entity_module.Entity() + entity.key = "key" + expected = Simple() + assert prop._from_base_type(entity) == expected + + @staticmethod + def test__prepare_for_put(): + class SubKind(model.Model): + bar = model.Property() + + class SomeKind(model.Model): + foo = model.LocalStructuredProperty(SubKind) + + entity = SomeKind(foo=SubKind()) + entity.foo._prepare_for_put = mock.Mock() + SomeKind.foo._prepare_for_put(entity) + entity.foo._prepare_for_put.assert_called_once_with() + + @staticmethod + def test__prepare_for_put_repeated(): + class SubKind(model.Model): + bar = model.Property() + + class SomeKind(model.Model): + foo = model.LocalStructuredProperty(SubKind, repeated=True) + + entity = SomeKind(foo=[SubKind(), SubKind()]) + entity.foo[0]._prepare_for_put = mock.Mock() + entity.foo[1]._prepare_for_put = mock.Mock() + SomeKind.foo._prepare_for_put(entity) + 
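# NOTE: a repeated StructuredProperty fans _prepare_for_put out to
+ # every sub-entity, hence one assertion per mocked sub-entity below.
+ 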
entity.foo[0]._prepare_for_put.assert_called_once_with() + entity.foo[1]._prepare_for_put.assert_called_once_with() + + @staticmethod + def test__prepare_for_put_repeated_None(): + class SubKind(model.Model): + bar = model.Property() + + class SomeKind(model.Model): + foo = model.LocalStructuredProperty(SubKind) + + entity = SomeKind() + SomeKind.foo._prepare_for_put(entity) # noop + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_repeated_local_structured_property(): + class SubKind(model.Model): + bar = model.Property() + + class SomeKind(model.Model): + foo = model.LocalStructuredProperty(SubKind, repeated=True, indexed=False) + + entity = SomeKind(foo=[SubKind(bar="baz")]) + data = {"_exclude_from_indexes": []} + protobuf = model._entity_to_protobuf(entity.foo[0], set_key=False)._pb + protobuf = protobuf.SerializePartialToString() + assert SomeKind.foo._to_datastore(entity, data, repeated=True) == ("foo",) + assert data.pop("_exclude_from_indexes") == ["foo"] + assert data == {"foo": [[protobuf]]} + + @staticmethod + def test_legacy_repeated_local_structured_property(in_context): + class SubKind(model.Model): + bar = model.Property() + + class SomeKind(model.Model): + foo = model.LocalStructuredProperty(SubKind, repeated=True, indexed=False) + + with in_context.new(legacy_data=True).use(): + entity = SomeKind(foo=[SubKind(bar="baz")]) + data = {"_exclude_from_indexes": []} + ds_entity = model._entity_to_ds_entity(entity.foo[0], set_key=False) + assert SomeKind.foo._to_datastore(entity, data, repeated=True) == ("foo",) + assert data.pop("_exclude_from_indexes") == ["foo"] + assert data == {"foo": [ds_entity]} + + @staticmethod + def test_legacy_non_repeated_local_structured_property(in_context): + class SubKind(model.Model): + bar = model.Property() + + class SomeKind(model.Model): + foo = model.LocalStructuredProperty(SubKind) + + with in_context.new(legacy_data=True).use(): + entity = SomeKind(foo=SubKind(bar="baz")) + data = {"_exclude_from_indexes": []} + assert SomeKind.foo._to_datastore(entity, data) == ("foo",) + assert data.pop("_exclude_from_indexes") == ["foo"] + ds_entity = model._entity_to_ds_entity(entity.foo, set_key=False) + assert data == {"foo": ds_entity} + + @staticmethod + def test_legacy_repeated_compressed_local_structured_property(): + class SubKind(model.Model): + bar = model.TextProperty() + + prop = model.LocalStructuredProperty(SubKind, repeated=True, compressed=True) + entity = SubKind(bar="baz") + ds_entity = model._entity_to_ds_entity(entity, set_key=False) + assert prop._call_from_base_type(ds_entity) == entity + + @staticmethod + def test_legacy_compressed_entity_local_structured_property(): + class SubKind(model.Model): + foo = model.StringProperty() + bar = model.StringProperty() + baz = model.StringProperty() + + prop = model.LocalStructuredProperty(SubKind, repeated=True, compressed=True) + entity = SubKind(foo="so", bar="much", baz="code") + compressed = b"".join( + [ + b"x\x9c+\xe2\x95bN\xcb\xcfW`\xd0b\x91b*\xce", + b"/\xe2\x97bNJ,\x02r\xd9\xa4XrK\x933 \x02U\x10", + b"\x81\xe4\xfc\x94T\x00\x08\xe1\n\xff", + ] + ) + + assert prop._call_from_base_type(compressed) == entity + + @staticmethod + def test__get_for_dict(): + class Mine(model.Model): + foo = model.StringProperty() + + class MineToo(model.Model): + bar = model.LocalStructuredProperty(Mine) + + mine = Mine(foo="Foo") + minetoo = MineToo() + minetoo.bar = mine + assert MineToo.bar._get_for_dict(minetoo) == {"foo": "Foo"} + + @staticmethod + def 
test__get_for_dict_repeated(): + class Mine(model.Model): + foo = model.StringProperty() + + class MineToo(model.Model): + bar = model.LocalStructuredProperty(Mine, repeated=True) + + mine = Mine(foo="Foo") + minetoo = MineToo() + minetoo.bar = [mine, mine] + assert MineToo.bar._get_for_dict(minetoo) == [ + {"foo": "Foo"}, + {"foo": "Foo"}, + ] + + @staticmethod + def test__get_for_dict_no_value(): + class Mine(model.Model): + foo = model.StringProperty() + + class MineToo(model.Model): + bar = model.LocalStructuredProperty(Mine) + + minetoo = MineToo() + minetoo.bar = None + assert MineToo.bar._get_for_dict(minetoo) is None + + @staticmethod + def test_legacy_optional_local_structured_property(in_context): + class SubKind(model.Model): + foo = model.Property() + + class ContainerB(model.Model): + child_b = model.LocalStructuredProperty(SubKind) + + class ContainerA(model.Model): + child_a = model.LocalStructuredProperty(ContainerB) + + with in_context.new(legacy_data=True).use(): + entity = ContainerA(child_a=ContainerB()) + data = {"_exclude_from_indexes": []} + assert ContainerA.child_a._to_datastore(entity, data) == ("child_a",) + assert data.pop("_exclude_from_indexes") == ["child_a"] + assert data["child_a"]["child_b"] is None + + @staticmethod + def test_local_structured_property_with_polymodel(in_context): + class Base(polymodel.PolyModel): + pass + + class SubKind(Base): + foo = model.StringProperty() + + class Container(model.Model): + child = model.LocalStructuredProperty(Base) + + entity = Container(child=SubKind(foo="bar")) + value = b"".join( + [ + b"\x1a \n\x05class\x12\x17J\x15\n\x07\x8a\x01\x04Base\n\n", + b"\x8a\x01\x07SubKind\x1a\r\n\x03foo\x12\x06\x8a\x01\x03bar", + ] + ) + + child = entity._properties["child"]._from_base_type(value) + assert child.foo == "bar" + + pb = entity_pb2.Entity() + pb._pb.MergeFromString(value) + value = helpers.entity_from_protobuf(pb) + child = model._entity_from_ds_entity(value, model_class=Base) + assert child._values["foo"].b_val == "bar" + + +class TestGenericProperty: + @staticmethod + def test_constructor(): + prop = model.GenericProperty(name="generic") + assert prop._name == "generic" + + @staticmethod + def test_constructor_compressed(): + prop = model.GenericProperty(name="generic", compressed=True) + assert prop._compressed is True + + @staticmethod + def test_constructor_compressed_and_indexed(): + with pytest.raises(NotImplementedError): + model.GenericProperty(name="generic", compressed=True, indexed=True) + + @staticmethod + def test__db_get_value(): + prop = model.GenericProperty() + + with pytest.raises(exceptions.NoLongerImplementedError): + prop._db_get_value(None, None) + + @staticmethod + def test__db_set_value(): + prop = model.GenericProperty() + + with pytest.raises(exceptions.NoLongerImplementedError): + prop._db_set_value(None, None, None) + + @staticmethod + def test__to_base_type(): + prop = model.GenericProperty(name="generic", compressed=True) + value = b"abc" * 10 + converted = prop._to_base_type(value) + + assert isinstance(converted, model._CompressedValue) + assert converted.z_val == zlib.compress(value) + + @staticmethod + def test__to_base_type_no_convert(): + prop = model.GenericProperty(name="generic") + value = b"abc" * 10 + converted = prop._to_base_type(value) + assert converted is None + + @staticmethod + def test__from_base_type(): + prop = model.GenericProperty(name="generic") + original = b"abc" * 10 + z_val = zlib.compress(original) + value = model._CompressedValue(z_val) + converted = 
prop._from_base_type(value)
+
+ assert converted == original
+
+ @staticmethod
+ def test__from_base_type_no_convert():
+ prop = model.GenericProperty(name="generic")
+ converted = prop._from_base_type(b"abc")
+ assert converted is None
+
+ @staticmethod
+ def test__validate():
+ prop = model.GenericProperty(name="generic", indexed=False)
+ assert prop._validate(b"abc") is None
+
+ @staticmethod
+ def test__validate_indexed():
+ prop = model.GenericProperty(name="generic", indexed=True)
+ assert prop._validate(42) is None
+
+ @staticmethod
+ def test__validate_indexed_bytes():
+ prop = model.GenericProperty(name="generic", indexed=True)
+ assert prop._validate(b"abc") is None
+
+ @staticmethod
+ def test__validate_indexed_unicode():
+ prop = model.GenericProperty(name="generic", indexed=True)
+ assert prop._validate("abc") is None
+
+ @staticmethod
+ def test__validate_indexed_bad_length():
+ prop = model.GenericProperty(name="generic", indexed=True)
+ with pytest.raises(exceptions.BadValueError):
+ prop._validate(b"ab" * model._MAX_STRING_LENGTH)
+
+
+class TestComputedProperty:
+ @staticmethod
+ def test_constructor():
+ def lower_name(self):
+ return self.lower() # pragma: NO COVER
+
+ prop = model.ComputedProperty(lower_name)
+ assert prop._func == lower_name
+
+ @staticmethod
+ def test_repr():
+ """Regression test for #256
+
+ https://github.com/googleapis/python-ndb/issues/256
+ """
+
+ def lower_name(self):
+ return self.lower() # pragma: NO COVER
+
+ prop = model.ComputedProperty(lower_name)
+ assert "lower_name" in repr(prop)
+
+ @staticmethod
+ def test__set_value():
+ prop = model.ComputedProperty(lambda self: self) # pragma: NO COVER
+ with pytest.raises(model.ComputedPropertyError):
+ prop._set_value(None, None)
+
+ @staticmethod
+ def test__delete_value():
+ prop = model.ComputedProperty(lambda self: self) # pragma: NO COVER
+ with pytest.raises(model.ComputedPropertyError):
+ prop._delete_value(None)
+
+ @staticmethod
+ def test__get_value():
+ prop = model.ComputedProperty(lambda self: 42)
+ entity = mock.Mock(_projection=None, _values={}, spec=("_projection"))
+ assert prop._get_value(entity) == 42
+
+ @staticmethod
+ def test__get_value_with_projection():
+ prop = model.ComputedProperty(
+ lambda self: 42, name="computed"
+ ) # pragma: NO COVER
+ entity = mock.Mock(
+ _projection=["computed"],
+ _values={"computed": 84},
+ spec=("_projection", "_values"),
+ )
+ assert prop._get_value(entity) == 84
+
+ @staticmethod
+ def test__get_value_empty_projection():
+ prop = model.ComputedProperty(lambda self: 42)
+ entity = mock.Mock(_projection=None, _values={}, spec=("_projection"))
+ prop._prepare_for_put(entity)
+ assert entity._values == {prop._name: 42}
+
+
+class TestMetaModel:
+ @staticmethod
+ def test___repr__():
+ expected = "Model<>"
+ assert repr(model.Model) == expected
+
+ @staticmethod
+ def test___repr__extended():
+ class Mine(model.Model):
+ first = model.IntegerProperty()
+ second = model.StringProperty()
+
+ expected = (
+ "Mine<first=IntegerProperty('first'), second=StringProperty('second')>"
+ )
+ assert repr(Mine) == expected
+
+ @staticmethod
+ def test_bad_kind():
+ with pytest.raises(model.KindError):
+
+ class Mine(model.Model):
+ @classmethod
+ def _get_kind(cls):
+ return 525600
+
+ @staticmethod
+ def test_invalid_property_name():
+ with pytest.raises(TypeError):
+
+ class Mine(model.Model):
+ _foo = model.StringProperty()
+
+ @staticmethod
+ def test_repeated_property():
+ class Mine(model.Model):
+ foo = model.StringProperty(repeated=True)
+
+ assert Mine._has_repeated
+
+ @staticmethod
+ def 
test_non_property_attribute(): + model_attr = mock.Mock(spec=model.ModelAttribute) + + class Mine(model.Model): + baz = model_attr + + model_attr._fix_up.assert_called_once_with(Mine, "baz") + + +class TestModel: + @staticmethod + def test_constructor_defaults(): + entity = model.Model() + assert entity.__dict__ == {"_values": {}} + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_key(): + key = key_module.Key("Foo", "bar") + entity = model.Model(key=key) + assert entity.__dict__ == {"_values": {}, "_entity_key": key} + + entity = model.Model(_key=key) + assert entity.__dict__ == {"_values": {}, "_entity_key": key} + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_key_parts(): + entity = model.Model(id=124) + key = key_module.Key("Model", 124) + assert entity.__dict__ == {"_values": {}, "_entity_key": key} + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_namespace_no_key_parts(): + entity = model.Model(namespace="myspace") + key = key_module.Key("Model", None, namespace="myspace") + assert entity.__dict__ == {"_entity_key": key, "_values": {}} + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_app(): + entity = model.Model(app="thisproject") + key = key_module.Key("Model", None, project="thisproject") + assert entity.__dict__ == {"_values": {}, "_entity_key": key} + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_project(): + entity = model.Model(project="thisproject") + key = key_module.Key("Model", None, project="thisproject") + assert entity.__dict__ == {"_values": {}, "_entity_key": key} + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_app_and_project(): + with pytest.raises(exceptions.BadArgumentError): + model.Model(app="foo", project="bar") + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_key_and_key_parts(): + key = key_module.Key("Foo", "bar") + with pytest.raises(exceptions.BadArgumentError): + model.Model(key=key, id=124) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_key_and_key_parts_with_namespace(): + key = key_module.Key("Foo", "bar") + with pytest.raises(exceptions.BadArgumentError): + model.Model(key=key, namespace="myspace") + + @staticmethod + def test_constructor_user_property_collision(): + class SecretMap(model.Model): + key = model.IntegerProperty() + + entity = SecretMap(key=1001) + assert entity.__dict__ == {"_values": {"key": 1001}} + + @staticmethod + def test_constructor_with_projection(): + class Book(model.Model): + pages = model.IntegerProperty() + author = model.StringProperty() + publisher = model.StringProperty() + + entity = Book(pages=287, author="Tim Robert", projection=("pages", "author")) + assert entity.__dict__ == { + "_values": {"pages": 287, "author": "Tim Robert"}, + "_projection": ("pages", "author"), + } + + @staticmethod + def test_constructor_with_structured_property_projection(): + class Author(model.Model): + first_name = model.StringProperty() + last_name = model.StringProperty() + + class Book(model.Model): + pages = model.IntegerProperty() + author = model.StructuredProperty(Author) + publisher = model.StringProperty() + + entity = Book( + pages=287, + author=Author(first_name="Tim", last_name="Robert"), + projection=("author.first_name", "author.last_name"), + ) + assert entity._projection == ("author.first_name", "author.last_name") + assert entity.author._projection == 
("first_name", "last_name") + + @staticmethod + def test_constructor_with_repeated_structured_property_projection(): + class Author(model.Model): + first_name = model.StringProperty() + last_name = model.StringProperty() + + class Book(model.Model): + pages = model.IntegerProperty() + authors = model.StructuredProperty(Author, repeated=True) + publisher = model.StringProperty() + + entity = Book( + pages=287, + authors=[ + Author(first_name="Tim", last_name="Robert"), + Author(first_name="Jim", last_name="Bobert"), + ], + projection=("authors.first_name", "authors.last_name"), + ) + assert entity._projection == ( + "authors.first_name", + "authors.last_name", + ) + assert entity.authors[0]._projection == ("first_name", "last_name") + + @staticmethod + def test_constructor_non_existent_property(): + with pytest.raises(AttributeError): + model.Model(pages=287) + + @staticmethod + def test_constructor_non_property(): + class TimeTravelVehicle(model.Model): + speed = 88 + + with pytest.raises(TypeError): + TimeTravelVehicle(speed=28) + + @staticmethod + def test_repr(): + ManyFields = ManyFieldsFactory() + entity = ManyFields(self=909, id="hi", key=[88.5, 0.0], value=None) + expected = "ManyFields(id='hi', key=[88.5, 0.0], self=909, value=None)" + assert repr(entity) == expected + + @staticmethod + def test_repr_with_projection(): + ManyFields = ManyFieldsFactory() + entity = ManyFields( + self=909, + id="hi", + key=[88.5, 0.0], + value=None, + projection=("self", "id"), + ) + expected = ( + "ManyFields(id='hi', key=[88.5, 0.0], self=909, value=None, " + "_projection=('self', 'id'))" + ) + assert repr(entity) == expected + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_repr_with_property_named_key(): + ManyFields = ManyFieldsFactory() + entity = ManyFields(self=909, id="hi", key=[88.5, 0.0], value=None, _id=78) + expected = ( + "ManyFields(_key=Key('ManyFields', 78), id='hi', key=[88.5, 0.0], " + "self=909, value=None)" + ) + assert repr(entity) == expected + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_repr_with_property_named_key_not_set(): + ManyFields = ManyFieldsFactory() + entity = ManyFields(self=909, id="hi", value=None, _id=78) + expected = ( + "ManyFields(_key=Key('ManyFields', 78), id='hi', " "self=909, value=None)" + ) + assert repr(entity) == expected + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_repr_no_property_named_key(): + class NoKeyCollision(model.Model): + word = model.StringProperty() + + entity = NoKeyCollision(word="one", id=801) + expected = "NoKeyCollision(key=Key('NoKeyCollision', 801), word='one')" + assert repr(entity) == expected + + @staticmethod + def test__get_kind(): + assert model.Model._get_kind() == "Model" + + class Simple(model.Model): + pass + + assert Simple._get_kind() == "Simple" + + @staticmethod + def test__class_name(): + assert model.Model._class_name() == "Model" + + class Simple(model.Model): + pass + + assert Simple._class_name() == "Simple" + + @staticmethod + def test__default_filters(): + assert model.Model._default_filters() == () + + class Simple(model.Model): + pass + + assert Simple._default_filters() == () + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test___hash__(): + ManyFields = ManyFieldsFactory() + entity = ManyFields(self=909, id="hi", value=None, _id=78) + with pytest.raises(TypeError): + hash(entity) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test___eq__wrong_type(): + class Simple(model.Model): + pass + + ManyFields = 
ManyFieldsFactory() + entity1 = ManyFields(self=909, id="hi", value=None, _id=78) + entity2 = Simple() + assert not entity1 == entity2 + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test___eq__wrong_key(): + ManyFields = ManyFieldsFactory() + entity1 = ManyFields(_id=78) + entity2 = ManyFields(_id="seventy-eight") + assert not entity1 == entity2 + + @staticmethod + def test___eq__wrong_projection(): + ManyFields = ManyFieldsFactory() + entity1 = ManyFields(self=90, projection=("self",)) + entity2 = ManyFields(value="a", unused=0.0, projection=("value", "unused")) + assert not entity1 == entity2 + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test___eq__same_type_same_key(): + ManyFields = ManyFieldsFactory() + entity1 = ManyFields(self=909, id="hi", _id=78) + entity2 = ManyFields(self=909, id="bye", _id=78) + assert entity1 == entity1 + assert not entity1 == entity2 + + @staticmethod + def test___eq__same_type_same_key_same_projection(): + ManyFields = ManyFieldsFactory() + entity1 = ManyFields(self=-9, id="hi", projection=("self", "id")) + entity2 = ManyFields(self=-9, id="bye", projection=("self", "id")) + assert entity1 == entity1 + assert not entity1 == entity2 + + @staticmethod + def test__eq__expando_w_different_number_of_properties(): + class SomeKind(model.Expando): + foo = model.IntegerProperty() + + entity1 = SomeKind(foo=1) + entity2 = SomeKind(foo=1, bar=2) + + assert not entity1 == entity2 + + @staticmethod + def test__eq__expando_w_different_properties(): + class SomeKind(model.Expando): + foo = model.IntegerProperty() + + entity1 = SomeKind(foo=1, bar=2) + entity2 = SomeKind(foo=1, baz=3) + + assert not entity1 == entity2 + + @staticmethod + def test__eq__expando(): + class SomeKind(model.Expando): + foo = model.IntegerProperty() + + entity1 = SomeKind(foo=1, bar=2) + entity2 = SomeKind(foo=1, bar=2) + + assert entity1 == entity2 + + @staticmethod + def test__eq__structured_property(): + class OtherKind(model.Model): + bar = model.IntegerProperty() + + class SomeKind(model.Model): + foo = model.StructuredProperty(OtherKind) + hi = model.StringProperty() + + entity1 = SomeKind(hi="mom", foo=OtherKind(bar=42)) + entity2 = SomeKind(hi="mom", foo=OtherKind(bar=42)) + + assert entity1 == entity2 + + @staticmethod + def test__eq__structured_property_differs(): + class OtherKind(model.Model): + bar = model.IntegerProperty() + + class SomeKind(model.Model): + foo = model.StructuredProperty(OtherKind) + hi = model.StringProperty() + + entity1 = SomeKind(hi="mom", foo=OtherKind(bar=42)) + entity2 = SomeKind(hi="mom", foo=OtherKind(bar=43)) + + assert not entity1 == entity2 + + @staticmethod + def test__eq__repeated_structured_property(): + class OtherKind(model.Model): + bar = model.IntegerProperty() + + class SomeKind(model.Model): + foo = model.StructuredProperty(OtherKind, repeated=True) + hi = model.StringProperty() + + entity1 = SomeKind(hi="mom", foo=[OtherKind(bar=42)]) + entity2 = SomeKind(hi="mom", foo=[OtherKind(bar=42)]) + + assert entity1 == entity2 + + @staticmethod + def test__eq__repeated_structured_property_differs(): + class OtherKind(model.Model): + bar = model.IntegerProperty() + + class SomeKind(model.Model): + foo = model.StructuredProperty(OtherKind, repeated=True) + hi = model.StringProperty() + + entity1 = SomeKind(hi="mom", foo=[OtherKind(bar=42)]) + entity2 = SomeKind(hi="mom", foo=[OtherKind(bar=42), OtherKind(bar=43)]) + + assert not entity1 == entity2 + + @staticmethod + def test___ne__(): + class Simple(model.Model): + 
pass + + ManyFields = ManyFieldsFactory() + entity1 = ManyFields(self=-9, id="hi") + entity2 = Simple() + entity3 = ManyFields(self=-9, id="bye") + entity4 = ManyFields(self=-9, id="bye", projection=("self", "id")) + entity5 = None + entity6 = ManyFields(self=-9, id="hi") + assert not entity1 != entity1 + assert entity1 != entity2 + assert entity1 != entity3 + assert entity1 != entity4 + assert entity1 != entity5 + assert not entity1 != entity6 + + @staticmethod + def test___lt__(): + ManyFields = ManyFieldsFactory() + entity = ManyFields(self=-9, id="hi") + with pytest.raises(TypeError): + entity < entity + + @staticmethod + def test___le__(): + ManyFields = ManyFieldsFactory() + entity = ManyFields(self=-9, id="hi") + with pytest.raises(TypeError): + entity <= entity + + @staticmethod + def test___gt__(): + ManyFields = ManyFieldsFactory() + entity = ManyFields(self=-9, id="hi") + with pytest.raises(TypeError): + entity > entity + + @staticmethod + def test___ge__(): + ManyFields = ManyFieldsFactory() + entity = ManyFields(self=-9, id="hi") + with pytest.raises(TypeError): + entity >= entity + + @staticmethod + def test__validate_key(): + value = mock.sentinel.value + assert model.Model._validate_key(value) is value + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_api") + def test__put_no_key(_datastore_api): + entity = model.Model() + _datastore_api.put.return_value = future = tasklets.Future() + future.set_result(None) + + ds_entity = model._entity_to_ds_entity(entity) + assert entity._put() == entity.key + + # Can't do a simple "assert_called_once_with" here because entities' + # keys will fail test for equality because Datastore's Key.__eq__ + # method returns False if either key is partial, regardless of whether + # they're effectively equal or not. Have to do this more complicated + # unpacking instead. + assert _datastore_api.put.call_count == 1 + call_ds_entity, call_options = _datastore_api.put.call_args[0] + assert call_ds_entity.key.path == ds_entity.key.path + assert call_ds_entity.items() == ds_entity.items() + assert call_options == _options.Options() + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_api") + def test__put_w_key_no_cache(_datastore_api, in_context): + entity = model.Model() + _datastore_api.put.return_value = future = tasklets.Future() + + key = key_module.Key("SomeKind", 123) + future.set_result(key._key) + + ds_entity = model._entity_to_ds_entity(entity) + assert entity._put(use_cache=False) == key + assert not in_context.cache + + # Can't do a simple "assert_called_once_with" here because entities' + # keys will fail test for equality because Datastore's Key.__eq__ + # method returns False if either key is partial, regardless of whether + # they're effectively equal or not. Have to do this more complicated + # unpacking instead. 
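+ # NOTE: per the note above, two structurally identical partial keys
+ # still compare unequal, so the checks below unpack call_args and
+ # compare key.path and items() directly.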
+ assert _datastore_api.put.call_count == 1 + call_ds_entity, call_options = _datastore_api.put.call_args[0] + assert call_ds_entity.key.path == ds_entity.key.path + assert call_ds_entity.items() == ds_entity.items() + assert call_options == _options.Options(use_cache=False) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_api") + def test__put_w_key_with_cache(_datastore_api, in_context): + entity = model.Model() + _datastore_api.put.return_value = future = tasklets.Future() + + key = key_module.Key("SomeKind", 123) + future.set_result(key._key) + + ds_entity = model._entity_to_ds_entity(entity) + assert entity._put(use_cache=True) == key + assert in_context.cache[key] == entity + assert in_context.cache.get_and_validate(key) == entity + + # Can't do a simple "assert_called_once_with" here because entities' + # keys will fail test for equality because Datastore's Key.__eq__ + # method returns False if either key is partial, regardless of whether + # they're effectively equal or not. Have to do this more complicated + # unpacking instead. + assert _datastore_api.put.call_count == 1 + call_ds_entity, call_options = _datastore_api.put.call_args[0] + assert call_ds_entity.key.path == ds_entity.key.path + assert call_ds_entity.items() == ds_entity.items() + assert call_options == _options.Options(use_cache=True) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_api") + def test__put_w_key(_datastore_api): + entity = model.Model() + _datastore_api.put.return_value = future = tasklets.Future() + + key = key_module.Key("SomeKind", 123) + future.set_result(key._key) + + ds_entity = model._entity_to_ds_entity(entity) + assert entity._put() == key + + # Can't do a simple "assert_called_once_with" here because entities' + # keys will fail test for equality because Datastore's Key.__eq__ + # method returns False if either key is partial, regardless of whether + # they're effectively equal or not. Have to do this more complicated + # unpacking instead. + assert _datastore_api.put.call_count == 1 + call_ds_entity, call_options = _datastore_api.put.call_args[0] + assert call_ds_entity.key.path == ds_entity.key.path + assert call_ds_entity.items() == ds_entity.items() + assert call_options == _options.Options() + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_api") + def test__put_async(_datastore_api): + entity = model.Model() + _datastore_api.put.return_value = future = tasklets.Future() + + key = key_module.Key("SomeKind", 123) + future.set_result(key._key) + + ds_entity = model._entity_to_ds_entity(entity) + tasklet_future = entity._put_async() + assert tasklet_future.result() == key + + # Can't do a simple "assert_called_once_with" here because entities' + # keys will fail test for equality because Datastore's Key.__eq__ + # method returns False if either key is partial, regardless of whether + # they're effectively equal or not. Have to do this more complicated + # unpacking instead. 
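+ # NOTE: _put_async returns a tasklet Future that resolves to the
+ # stored key; the synchronous _put variants above presumably just
+ # block on the same Future.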
+ assert _datastore_api.put.call_count == 1 + call_ds_entity, call_options = _datastore_api.put.call_args[0] + assert call_ds_entity.key.path == ds_entity.key.path + assert call_ds_entity.items() == ds_entity.items() + assert call_options == _options.Options() + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test__prepare_for_put(): + class Simple(model.Model): + foo = model.DateTimeProperty() + + entity = Simple(foo=datetime.datetime.now()) + with mock.patch.object( + entity._properties["foo"], "_prepare_for_put" + ) as patched: + entity._prepare_for_put() + patched.assert_called_once() + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_api") + def test__put_w_hooks(_datastore_api): + class Simple(model.Model): + def __init__(self): + super(Simple, self).__init__() + self.pre_put_calls = [] + self.post_put_calls = [] + + def _pre_put_hook(self, *args, **kwargs): + self.pre_put_calls.append((args, kwargs)) + + def _post_put_hook(self, future, *args, **kwargs): + assert isinstance(future, tasklets.Future) + self.post_put_calls.append((args, kwargs)) + + entity = Simple() + _datastore_api.put.return_value = future = tasklets.Future() + future.set_result(None) + + ds_entity = model._entity_to_ds_entity(entity) + assert entity._put() == entity.key + + # Can't do a simple "assert_called_once_with" here because entities' + # keys will fail test for equality because Datastore's Key.__eq__ + # method returns False if either key is partial, regardless of whether + # they're effectively equal or not. Have to do this more complicated + # unpacking instead. + assert _datastore_api.put.call_count == 1 + call_ds_entity, call_options = _datastore_api.put.call_args[0] + assert call_ds_entity.key.path == ds_entity.key.path + assert call_ds_entity.items() == ds_entity.items() + assert call_options == _options.Options() + + assert entity.pre_put_calls == [((), {})] + assert entity.post_put_calls == [((), {})] + + @staticmethod + def test__lookup_model(): + class ThisKind(model.Model): + pass + + assert model.Model._lookup_model("ThisKind") is ThisKind + + @staticmethod + def test__lookup_model_use_default(): + sentinel = object() + assert model.Model._lookup_model("NoKind", sentinel) is sentinel + + @staticmethod + def test__lookup_model_not_found(): + with pytest.raises(model.KindError): + model.Model._lookup_model("NoKind") + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test__check_properties(): + class XModel(model.Model): + x = model.IntegerProperty() + + properties = ["x"] + assert XModel._check_properties(properties) is None + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test__check_properties_with_sub(): + class XModel(model.Model): + x = model.IntegerProperty() + + properties = ["x.x"] + # Will raise error until model.StructuredProperty is implemented + with pytest.raises(model.InvalidPropertyError): + XModel._check_properties(properties) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test__check_properties_not_found(): + properties = ["x"] + with pytest.raises(model.InvalidPropertyError): + model.Model._check_properties(properties) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_query(): + class XModel(model.Model): + x = model.IntegerProperty() + + query = XModel.query(XModel.x == 42) + assert query.kind == "XModel" + assert query.filters == (XModel.x == 42) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_query_distinct(): + class 
XModel(model.Model): + x = model.IntegerProperty() + + query = XModel.query(distinct=True, projection=("x",)) + assert query.distinct_on == ("x",) + + @staticmethod + def test_query_distinct_no_projection(): + class XModel(model.Model): + x = model.IntegerProperty() + + with pytest.raises(TypeError): + XModel.query(distinct=True) + + @staticmethod + def test_query_distinct_w_distinct_on(): + class XModel(model.Model): + x = model.IntegerProperty() + + with pytest.raises(TypeError): + XModel.query(distinct=True, distinct_on=("x",)) + + @staticmethod + def test_query_distinct_w_group_by(): + class XModel(model.Model): + x = model.IntegerProperty() + + with pytest.raises(TypeError): + XModel.query(distinct=True, group_by=("x",)) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_query_projection_of_unindexed_attribute(): + class XModel(model.Model): + x = model.IntegerProperty(indexed=False) + + with pytest.raises(model.InvalidPropertyError): + XModel.query(projection=["x"]) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_gql(): + class Simple(model.Model): + x = model.IntegerProperty() + + query = Simple.gql("WHERE x=1") + assert isinstance(query, query_module.Query) + assert query.kind == "Simple" + assert query.filters == query_module.FilterNode("x", "=", 1) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_gql_binding(): + class Simple(model.Model): + x = model.IntegerProperty() + y = model.StringProperty() + + query = Simple.gql("WHERE x=:1 and y=:foo", 2, foo="bar") + assert isinstance(query, query_module.Query) + assert query.kind == "Simple" + assert query.filters == query_module.AND( + query_module.FilterNode("x", "=", 2), + query_module.FilterNode("y", "=", "bar"), + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_api") + def test_allocate_ids(_datastore_api): + completed = [ + entity_pb2.Key( + partition_id=entity_pb2.PartitionId(project_id="testing"), + path=[entity_pb2.Key.PathElement(kind="Simple", id=21)], + ), + entity_pb2.Key( + partition_id=entity_pb2.PartitionId(project_id="testing"), + path=[entity_pb2.Key.PathElement(kind="Simple", id=42)], + ), + ] + _datastore_api.allocate.return_value = utils.future_result(completed) + + class Simple(model.Model): + pass + + keys = Simple.allocate_ids(2) + assert keys == ( + key_module.Key("Simple", 21), + key_module.Key("Simple", 42), + ) + + call_keys, call_options = _datastore_api.allocate.call_args[0] + call_keys = [key_module.Key._from_ds_key(key) for key in call_keys] + assert call_keys == [ + key_module.Key("Simple", None), + key_module.Key("Simple", None), + ] + assert call_options == _options.Options() + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_api") + def test_allocate_ids_w_hooks(_datastore_api): + completed = [ + entity_pb2.Key( + partition_id=entity_pb2.PartitionId(project_id="testing"), + path=[entity_pb2.Key.PathElement(kind="Simple", id=21)], + ), + entity_pb2.Key( + partition_id=entity_pb2.PartitionId(project_id="testing"), + path=[entity_pb2.Key.PathElement(kind="Simple", id=42)], + ), + ] + _datastore_api.allocate.return_value = utils.future_result(completed) + + class Simple(model.Model): + pre_allocate_id_calls = [] + post_allocate_id_calls = [] + + @classmethod + def _pre_allocate_ids_hook(cls, *args, **kwargs): + cls.pre_allocate_id_calls.append((args, kwargs)) + + @classmethod + def _post_allocate_ids_hook( + cls, size, max, parent, 
future, *args, **kwargs + ): + assert isinstance(future, tasklets.Future) + cls.post_allocate_id_calls.append(((size, max, parent) + args, kwargs)) + + keys = Simple.allocate_ids(2) + assert keys == ( + key_module.Key("Simple", 21), + key_module.Key("Simple", 42), + ) + + call_keys, call_options = _datastore_api.allocate.call_args[0] + call_keys = [key_module.Key._from_ds_key(key) for key in call_keys] + assert call_keys == [ + key_module.Key("Simple", None), + key_module.Key("Simple", None), + ] + assert call_options == _options.Options() + + assert Simple.pre_allocate_id_calls == [((2, None, None), {})] + assert Simple.post_allocate_id_calls == [((2, None, None), {})] + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_allocate_ids_with_max(): + class Simple(model.Model): + pass + + with pytest.raises(NotImplementedError): + Simple.allocate_ids(max=6) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_allocate_ids_no_args(): + class Simple(model.Model): + pass + + with pytest.raises(TypeError): + Simple.allocate_ids() + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_api") + def test_allocate_ids_async(_datastore_api): + completed = [ + entity_pb2.Key( + partition_id=entity_pb2.PartitionId(project_id="testing"), + path=[entity_pb2.Key.PathElement(kind="Simple", id=21)], + ), + entity_pb2.Key( + partition_id=entity_pb2.PartitionId(project_id="testing"), + path=[entity_pb2.Key.PathElement(kind="Simple", id=42)], + ), + ] + _datastore_api.allocate.return_value = utils.future_result(completed) + + class Simple(model.Model): + pass + + future = Simple.allocate_ids_async(2) + keys = future.result() + assert keys == ( + key_module.Key("Simple", 21), + key_module.Key("Simple", 42), + ) + + call_keys, call_options = _datastore_api.allocate.call_args[0] + call_keys = [key_module.Key._from_ds_key(key) for key in call_keys] + assert call_keys == [ + key_module.Key("Simple", None), + key_module.Key("Simple", None), + ] + assert call_options == _options.Options() + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb.model.key_module") + def test_get_by_id(key_module): + entity = object() + key = key_module.Key.return_value + key.get_async.return_value = utils.future_result(entity) + + class Simple(model.Model): + pass + + assert Simple.get_by_id(1) is entity + key_module.Key.assert_called_once_with("Simple", 1, parent=None) + key.get_async.assert_called_once_with(_options=_options.ReadOptions()) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb.model.key_module") + def test_get_by_id_w_parent_project_namespace(key_module): + entity = object() + key = key_module.Key.return_value + key.get_async.return_value = utils.future_result(entity) + + class Simple(model.Model): + pass + + assert ( + Simple.get_by_id(1, parent="foo", project="baz", namespace="bar") is entity + ) + + key_module.Key.assert_called_once_with( + "Simple", 1, parent="foo", namespace="bar", app="baz" + ) + + key.get_async.assert_called_once_with(_options=_options.ReadOptions()) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb.model.key_module") + def test_get_by_id_w_default_namespace(key_module): + entity = object() + key = key_module.Key.return_value + key.get_async.return_value = utils.future_result(entity) + + class Simple(model.Model): + pass + + assert Simple.get_by_id(1, namespace="") is entity + + 
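# NOTE: an explicit namespace="" is forwarded to the key verbatim,
+ # distinguishing "use the default namespace" explicitly from simply
+ # omitting the argument.
+ 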
key_module.Key.assert_called_once_with("Simple", 1, namespace="", parent=None) + + key.get_async.assert_called_once_with(_options=_options.ReadOptions()) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb.model.key_module") + def test_get_by_id_w_app(key_module): + entity = object() + key = key_module.Key.return_value + key.get_async.return_value = utils.future_result(entity) + + class Simple(model.Model): + pass + + assert Simple.get_by_id(1, app="baz") is entity + + key_module.Key.assert_called_once_with("Simple", 1, parent=None, app="baz") + + key.get_async.assert_called_once_with(_options=_options.ReadOptions()) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_get_by_id_w_app_and_project(): + class Simple(model.Model): + pass + + with pytest.raises(TypeError): + Simple.get_by_id(1, app="baz", project="bar") + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb.model.key_module") + def test_get_by_id_async(key_module): + entity = object() + key = key_module.Key.return_value + key.get_async.return_value = utils.future_result(entity) + + class Simple(model.Model): + pass + + future = Simple.get_by_id_async(1) + assert future.result() is entity + + key_module.Key.assert_called_once_with("Simple", 1, parent=None) + + key.get_async.assert_called_once_with(_options=_options.ReadOptions()) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb.model.key_module") + def test_get_or_insert_get(key_module): + entity = object() + key = key_module.Key.return_value + key.get_async.return_value = utils.future_result(entity) + + class Simple(model.Model): + foo = model.IntegerProperty() + + assert Simple.get_or_insert("one", foo=42) is entity + + key_module.Key.assert_called_once_with("Simple", "one", parent=None) + + key.get_async.assert_called_once_with(_options=_options.ReadOptions()) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb.model.key_module") + def test_get_or_insert_get_w_app(key_module): + entity = object() + key = key_module.Key.return_value + key.get_async.return_value = utils.future_result(entity) + + class Simple(model.Model): + foo = model.IntegerProperty() + + assert Simple.get_or_insert("one", foo=42, app="himom") is entity + + key_module.Key.assert_called_once_with( + "Simple", "one", parent=None, app="himom" + ) + + key.get_async.assert_called_once_with(_options=_options.ReadOptions()) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb.model.key_module") + def test_get_or_insert_get_w_namespace(key_module): + entity = object() + key = key_module.Key.return_value + key.get_async.return_value = utils.future_result(entity) + + class Simple(model.Model): + foo = model.IntegerProperty() + + assert Simple.get_or_insert("one", foo=42, namespace="himom") is entity + + key_module.Key.assert_called_once_with( + "Simple", "one", parent=None, namespace="himom" + ) + + key.get_async.assert_called_once_with(_options=_options.ReadOptions()) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb.model.key_module") + def test_get_or_insert_get_w_default_namespace(key_module): + entity = object() + key = key_module.Key.return_value + key.get_async.return_value = utils.future_result(entity) + + class Simple(model.Model): + foo = model.IntegerProperty() + + assert Simple.get_or_insert("one", foo=0, namespace="") is entity + + 
key_module.Key.assert_called_once_with( + "Simple", "one", parent=None, namespace="" + ) + + key.get_async.assert_called_once_with(_options=_options.ReadOptions()) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_get_or_insert_get_w_app_and_project(): + class Simple(model.Model): + foo = model.IntegerProperty() + + with pytest.raises(TypeError): + Simple.get_or_insert("one", foo=42, app="himom", project="hidad") + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_get_or_insert_get_w_id_instead_of_name(): + class Simple(model.Model): + foo = model.IntegerProperty() + + with pytest.raises(TypeError): + Simple.get_or_insert(1, foo=42) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_get_or_insert_get_w_empty_name(): + class Simple(model.Model): + foo = model.IntegerProperty() + + with pytest.raises(TypeError): + Simple.get_or_insert("", foo=42) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb.model._transaction") + @mock.patch("google.cloud.ndb.model.key_module") + def test_get_or_insert_insert_in_transaction(patched_key_module, _transaction): + class MockKey(key_module.Key): + get_async = mock.Mock(return_value=utils.future_result(None)) + + patched_key_module.Key = MockKey + + class Simple(model.Model): + foo = model.IntegerProperty() + + put_async = mock.Mock(return_value=utils.future_result(None)) + + _transaction.in_transaction.return_value = True + + entity = Simple.get_or_insert("one", foo=42) + assert entity.foo == 42 + assert entity._key == MockKey("Simple", "one") + entity.put_async.assert_called_once_with(_options=_options.ReadOptions()) + + entity._key.get_async.assert_called_once_with(_options=_options.ReadOptions()) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb.model._transaction") + @mock.patch("google.cloud.ndb.model.key_module") + def test_get_or_insert_insert_not_in_transaction(patched_key_module, _transaction): + class MockKey(key_module.Key): + get_async = mock.Mock(return_value=utils.future_result(None)) + + patched_key_module.Key = MockKey + + class Simple(model.Model): + foo = model.IntegerProperty() + + put_async = mock.Mock(return_value=utils.future_result(None)) + + _transaction.in_transaction.return_value = False + _transaction.transaction_async = lambda f: f() + + entity = Simple.get_or_insert("one", foo=42) + assert entity.foo == 42 + assert entity._key == MockKey("Simple", "one") + entity.put_async.assert_called_once_with(_options=_options.ReadOptions()) + + entity._key.get_async.assert_called_once_with(_options=_options.ReadOptions()) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb.model._transaction") + @mock.patch("google.cloud.ndb.model.key_module") + def test_get_or_insert_insert_model_has_name_and_parent_properties( + patched_key_module, _transaction + ): + class MockKey(key_module.Key): + get_async = mock.Mock(return_value=utils.future_result(None)) + + patched_key_module.Key = MockKey + + class Simple(model.Model): + parent = model.IntegerProperty() + name = model.StringProperty() + + put_async = mock.Mock(return_value=utils.future_result(None)) + + _transaction.in_transaction.return_value = False + _transaction.transaction_async = lambda f: f() + + entity = Simple.get_or_insert("one", parent=42, name="Priscilla") + assert entity.parent == 42 + assert entity.name == "Priscilla" + assert entity._key == MockKey("Simple", "one") + 
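# NOTE: "parent" and "name" collide with get_or_insert's key-building
+ # arguments; the underscore-prefixed spellings (such as _parent in the
+ # next test) keep them usable as ordinary model properties here.
+ 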
entity.put_async.assert_called_once_with(_options=_options.ReadOptions()) + + entity._key.get_async.assert_called_once_with(_options=_options.ReadOptions()) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb.model._transaction") + @mock.patch("google.cloud.ndb.model.key_module") + def test_get_or_insert_w_parent_insert_model_has_name_and_parent_properties( + patched_key_module, _transaction + ): + parent_key = key_module.Key("SomeKind", "parent_name") + + class MockKey(key_module.Key): + get_async = mock.Mock(return_value=utils.future_result(None)) + + patched_key_module.Key = MockKey + + class Simple(model.Model): + parent = model.IntegerProperty() + name = model.StringProperty() + + put_async = mock.Mock(return_value=utils.future_result(None)) + + _transaction.in_transaction.return_value = False + _transaction.transaction_async = lambda f: f() + + entity = Simple.get_or_insert( + "one", _parent=parent_key, parent=42, name="Priscilla" + ) + assert entity.parent == 42 + assert entity.name == "Priscilla" + assert entity._key == MockKey("SomeKind", "parent_name", "Simple", "one") + entity.put_async.assert_called_once_with(_options=_options.ReadOptions()) + + entity._key.get_async.assert_called_once_with(_options=_options.ReadOptions()) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb.model._transaction") + @mock.patch("google.cloud.ndb.model.key_module") + def test_get_or_insert_insert_model_has_timeout_property( + patched_key_module, _transaction + ): + class MockKey(key_module.Key): + get_async = mock.Mock(return_value=utils.future_result(None)) + + patched_key_module.Key = MockKey + + class Simple(model.Model): + timeout = model.IntegerProperty() + + put_async = mock.Mock(return_value=utils.future_result(None)) + + _transaction.in_transaction.return_value = False + _transaction.transaction_async = lambda f: f() + + entity = Simple.get_or_insert("one", timeout=42) + assert entity.timeout == 42 + assert entity._key == MockKey("Simple", "one") + entity.put_async.assert_called_once_with(_options=_options.ReadOptions()) + + entity._key.get_async.assert_called_once_with(_options=_options.ReadOptions()) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb.model._transaction") + @mock.patch("google.cloud.ndb.model.key_module") + def test_get_or_insert_insert_with_timeout_model_has_timeout_property( + patched_key_module, _transaction + ): + class MockKey(key_module.Key): + get_async = mock.Mock(return_value=utils.future_result(None)) + + patched_key_module.Key = MockKey + + class Simple(model.Model): + timeout = model.IntegerProperty() + + put_async = mock.Mock(return_value=utils.future_result(None)) + + _transaction.in_transaction.return_value = False + _transaction.transaction_async = lambda f: f() + + entity = Simple.get_or_insert("one", _timeout=60, timeout=42) + assert entity.timeout == 42 + assert entity._key == MockKey("Simple", "one") + entity.put_async.assert_called_once_with( + _options=_options.ReadOptions(timeout=60) + ) + entity._key.get_async.assert_called_once_with( + _options=_options.ReadOptions(timeout=60) + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb.model.key_module") + def test_get_or_insert_async(key_module): + entity = object() + key = key_module.Key.return_value + key.get_async.return_value = utils.future_result(entity) + + class Simple(model.Model): + foo = model.IntegerProperty() + + future = 
Simple.get_or_insert_async("one", foo=42) + assert future.result() is entity + + key_module.Key.assert_called_once_with("Simple", "one", parent=None) + + key.get_async.assert_called_once_with(_options=_options.ReadOptions()) + + @staticmethod + def test_populate(): + class Simple(model.Model): + foo = model.IntegerProperty() + bar = model.StringProperty() + + entity = Simple() + entity.populate(foo=3, bar="baz") + + assert entity.foo == 3 + assert entity.bar == "baz" + + @staticmethod + def test_has_complete_key_no_key(): + class Simple(model.Model): + pass + + entity = Simple() + assert not entity.has_complete_key() + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_has_complete_key_incomplete_key(): + class Simple(model.Model): + pass + + entity = Simple(key=key_module.Key("Simple", None)) + assert not entity.has_complete_key() + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_has_complete_key_complete_with_id(): + class Simple(model.Model): + pass + + entity = Simple(id="happiness") + assert entity.has_complete_key() + + @staticmethod + def test_to_dict(): + class Simple(model.Model): + foo = model.IntegerProperty() + bar = model.StringProperty() + + entity = Simple(foo=3, bar="baz") + assert entity.to_dict() == {"foo": 3, "bar": "baz"} + + @staticmethod + def test_to_dict_with_include(): + class Simple(model.Model): + foo = model.IntegerProperty() + bar = model.StringProperty() + + entity = Simple(foo=3, bar="baz") + assert entity.to_dict(include={"foo"}) == {"foo": 3} + + @staticmethod + def test_to_dict_with_exclude(): + class Simple(model.Model): + foo = model.IntegerProperty() + bar = model.StringProperty() + + entity = Simple(foo=3, bar="baz") + assert entity.to_dict(exclude=("bar",)) == {"foo": 3} + + @staticmethod + def test_to_dict_with_projection(): + class Simple(model.Model): + foo = model.IntegerProperty() + bar = model.StringProperty() + + entity = Simple(foo=3, bar="baz", projection=("foo",)) + assert entity.to_dict() == {"foo": 3} + + @staticmethod + def test__code_name_from_stored_name(): + class Simple(model.Model): + foo = model.StringProperty() + bar = model.StringProperty(name="notbar") + + assert Simple._code_name_from_stored_name("foo") == "foo" + assert Simple._code_name_from_stored_name("notbar") == "bar" + + +class Test_entity_from_protobuf: + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_standard_case(): + class ThisKind(model.Model): + a = model.IntegerProperty() + b = model.BooleanProperty() + c = model.PickleProperty() + d = model.StringProperty(repeated=True) + e = model.PickleProperty(repeated=True) + notaproperty = True + + dill = {"sandwiches": ["turkey", "reuben"], "not_sandwiches": "tacos"} + gherkin = [{"a": {"b": "c"}, "d": 0}, [1, 2, 3], "himom"] + key = datastore.Key("ThisKind", 123, project="testing") + datastore_entity = datastore.Entity(key=key) + datastore_entity.update( + { + "a": 42, + "b": None, + "c": pickle.dumps(gherkin, pickle.HIGHEST_PROTOCOL), + "d": ["foo", "bar", "baz"], + "e": [ + pickle.dumps(gherkin, pickle.HIGHEST_PROTOCOL), + pickle.dumps(dill, pickle.HIGHEST_PROTOCOL), + ], + "notused": 32, + "notaproperty": None, + } + ) + protobuf = helpers.entity_to_protobuf(datastore_entity) + entity = model._entity_from_protobuf(protobuf) + assert isinstance(entity, ThisKind) + assert entity.a == 42 + assert entity.b is None + assert entity.c == gherkin + assert entity.d == ["foo", "bar", "baz"] + assert entity.e == [gherkin, dill] + assert entity._key == 
key_module.Key("ThisKind", 123, app="testing") + assert entity.notaproperty is True + + @staticmethod + def test_property_named_key(): + class ThisKind(model.Model): + key = model.StringProperty() + + key = datastore.Key("ThisKind", 123, project="testing") + datastore_entity = datastore.Entity(key=key) + datastore_entity.update({"key": "luck"}) + protobuf = helpers.entity_to_protobuf(datastore_entity) + entity = model._entity_from_protobuf(protobuf) + assert isinstance(entity, ThisKind) + assert entity.key == "luck" + assert entity._key.kind() == "ThisKind" + assert entity._key.id() == 123 + + @staticmethod + def test_expando_property(): + class ThisKind(model.Expando): + key = model.StringProperty() + + key = datastore.Key("ThisKind", 123, project="testing") + datastore_entity = datastore.Entity(key=key) + datastore_entity.update({"key": "luck", "expando_prop": "good"}) + protobuf = helpers.entity_to_protobuf(datastore_entity) + entity = model._entity_from_protobuf(protobuf) + assert isinstance(entity, ThisKind) + assert entity.key == "luck" + assert entity._key.kind() == "ThisKind" + assert entity._key.id() == 123 + assert entity.expando_prop == "good" + + @staticmethod + def test_expando_property_list_value(): + class ThisKind(model.Expando): + key = model.StringProperty() + + key = datastore.Key("ThisKind", 123, project="testing") + datastore_entity = datastore.Entity(key=key) + datastore_entity.update({"key": "luck", "expando_prop": ["good"]}) + protobuf = helpers.entity_to_protobuf(datastore_entity) + entity = model._entity_from_protobuf(protobuf) + assert isinstance(entity, ThisKind) + assert entity.key == "luck" + assert entity._key.kind() == "ThisKind" + assert entity._key.id() == 123 + assert entity.expando_prop == ["good"] + + @staticmethod + def test_value_but_non_expando_property(): + class ThisKind(model.Model): + key = model.StringProperty() + + key = datastore.Key("ThisKind", 123, project="testing") + datastore_entity = datastore.Entity(key=key) + datastore_entity.update({"key": "luck", "expando_prop": None}) + protobuf = helpers.entity_to_protobuf(datastore_entity) + entity = model._entity_from_protobuf(protobuf) + assert isinstance(entity, ThisKind) + assert entity.key == "luck" + assert entity._key.kind() == "ThisKind" + assert entity._key.id() == 123 + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_legacy_structured_property(): + class OtherKind(model.Model): + foo = model.IntegerProperty() + bar = model.StringProperty() + + class ThisKind(model.Model): + baz = model.StructuredProperty(OtherKind) + copacetic = model.BooleanProperty() + + key = datastore.Key("ThisKind", 123, project="testing") + datastore_entity = datastore.Entity(key=key) + datastore_entity.update( + { + "baz.foo": 42, + "baz.bar": "himom", + "copacetic": True, + "super.fluous": "whocares?", + } + ) + protobuf = helpers.entity_to_protobuf(datastore_entity) + entity = model._entity_from_protobuf(protobuf) + assert isinstance(entity, ThisKind) + assert entity.baz.foo == 42 + assert entity.baz.bar == "himom" + assert entity.copacetic is True + + assert not hasattr(entity, "super") + assert not hasattr(entity, "super.fluous") + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_repeated_structured_property(): + class OtherKind(model.Model): + foo = model.IntegerProperty() + bar = model.StringProperty() + + class ThisKind(model.Model): + baz = model.StructuredProperty(OtherKind, repeated=True) + copacetic = model.BooleanProperty() + + key = datastore.Key("ThisKind", 123, 
project="testing") + datastore_entity = datastore.Entity(key=key) + datastore_entity.update( + { + "baz.foo": [42, 144], + "baz.bar": ["himom", "hellodad"], + "copacetic": True, + } + ) + protobuf = helpers.entity_to_protobuf(datastore_entity) + entity = model._entity_from_protobuf(protobuf) + assert isinstance(entity, ThisKind) + assert entity.baz[0].foo == 42 + assert entity.baz[0].bar == "himom" + assert entity.baz[1].foo == 144 + assert entity.baz[1].bar == "hellodad" + assert entity.copacetic is True + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_legacy_repeated_structured_property_projection(): + class OtherKind(model.Model): + foo = model.IntegerProperty() + bar = model.StringProperty() + + class ThisKind(model.Model): + baz = model.StructuredProperty(OtherKind, repeated=True) + copacetic = model.BooleanProperty() + + key = datastore.Key("ThisKind", 123, project="testing") + datastore_entity = datastore.Entity(key=key) + datastore_entity.update({"baz.foo": 42, "baz.bar": "himom", "copacetic": True}) + protobuf = helpers.entity_to_protobuf(datastore_entity) + entity = model._entity_from_protobuf(protobuf) + assert isinstance(entity, ThisKind) + assert entity.baz[0].foo == 42 + assert entity.baz[0].bar == "himom" + assert entity.copacetic is True + + +class Test_entity_from_ds_entity: + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_legacy_repeated_structured_property_uneven(): + class OtherKind(model.Model): + foo = model.IntegerProperty() + bar = model.StringProperty() + + class ThisKind(model.Model): + baz = model.StructuredProperty(OtherKind, repeated=True) + copacetic = model.BooleanProperty() + + key = datastore.Key("ThisKind", 123, project="testing") + datastore_entity = datastore.Entity(key=key) + datastore_entity.items = mock.Mock( + return_value=( + # Order counts for coverage + ("baz.foo", [42, 144]), + ("baz.bar", ["himom", "hellodad", "iminjail"]), + ("copacetic", True), + ) + ) + + entity = model._entity_from_ds_entity(datastore_entity) + assert isinstance(entity, ThisKind) + assert entity.baz[0].foo == 42 + assert entity.baz[0].bar == "himom" + assert entity.baz[1].foo == 144 + assert entity.baz[1].bar == "hellodad" + assert entity.baz[2].foo is None + assert entity.baz[2].bar == "iminjail" + assert entity.copacetic is True + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_legacy_repeated_structured_property_uneven_expandos(): + class Expando1(model.Expando): + bar = model.StringProperty() + + class Expando2(model.Expando): + qux = model.StringProperty() + + class ThisKind(model.Model): + foo = model.StructuredProperty(model_class=Expando1, repeated=True) + baz = model.StructuredProperty(model_class=Expando2, repeated=True) + + key = datastore.Key("ThisKind", 123, project="testing") + datastore_entity = datastore.Entity(key=key) + datastore_entity.items = mock.Mock( + return_value=( + # Order matters here + ("foo.bar", ["foo_bar_1"]), + ("baz.qux", ["baz_qux_1", "baz_qux_2"]), + ("foo.custom_1", ["foo_c1_1", "foo_c1_2"]), # longer than foo.bar + ) + ) + entity = model._entity_from_ds_entity(datastore_entity) + assert isinstance(entity, ThisKind) + assert len(entity.foo) == 2 + assert len(entity.baz) == 2 + assert entity.foo[0].bar == "foo_bar_1" + assert entity.foo[0].custom_1 == "foo_c1_1" + assert entity.foo[1].bar is None + assert entity.foo[1].custom_1 == "foo_c1_2" + assert entity.baz[0].qux == "baz_qux_1" + assert entity.baz[1].qux == "baz_qux_2" + + @staticmethod + 
@pytest.mark.usefixtures("in_context") + def test_legacy_repeated_structured_property_with_name(): + class OtherKind(model.Model): + foo = model.IntegerProperty() + bar = model.StringProperty() + + class ThisKind(model.Model): + baz = model.StructuredProperty(OtherKind, "b", repeated=True) + copacetic = model.BooleanProperty() + + key = datastore.Key("ThisKind", 123, project="testing") + datastore_entity = datastore.Entity(key=key) + datastore_entity.items = mock.Mock( + return_value=( + # Order counts for coverage + ("b.foo", [42, 144]), + ("b.bar", ["himom", "hellodad", "iminjail"]), + ("copacetic", True), + ) + ) + + entity = model._entity_from_ds_entity(datastore_entity) + assert isinstance(entity, ThisKind) + assert entity.baz[0].foo == 42 + assert entity.baz[0].bar == "himom" + assert entity.baz[1].foo == 144 + assert entity.baz[1].bar == "hellodad" + assert entity.baz[2].foo is None + assert entity.baz[2].bar == "iminjail" + assert entity.copacetic is True + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_polymodel(): + class Animal(polymodel.PolyModel): + foo = model.IntegerProperty() + + class Cat(Animal): + bar = model.StringProperty() + + key = datastore.Key("Animal", 123, project="testing") + datastore_entity = datastore.Entity(key=key) + datastore_entity.update( + {"foo": 42, "bar": "himom!", "class": ["Animal", "Cat"]} + ) + + entity = model._entity_from_ds_entity(datastore_entity) + assert isinstance(entity, Cat) + assert entity.foo == 42 + assert entity.bar == "himom!" + assert entity.class_ == ["Animal", "Cat"] + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_polymodel_projection(): + class Animal(polymodel.PolyModel): + foo = model.IntegerProperty() + + class Cat(Animal): + bar = model.StringProperty() + + key = datastore.Key("Animal", 123, project="testing") + datastore_entity = datastore.Entity(key=key) + datastore_entity.update({"foo": 42, "bar": "himom!", "class": "Cat"}) + + entity = model._entity_from_ds_entity(datastore_entity) + assert isinstance(entity, Cat) + assert entity.foo == 42 + assert entity.bar == "himom!" 
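+        # Even though the projected "class" value was stored as a scalar, PolyModel's class_ property reads back as a list.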
+ assert entity.class_ == ["Cat"] + + +class Test_entity_to_protobuf: + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_standard_case(): + class ThisKind(model.Model): + a = model.IntegerProperty() + b = model.BooleanProperty() + c = model.PickleProperty() + d = model.StringProperty(repeated=True) + e = model.PickleProperty(repeated=True) + notaproperty = True + + dill = {"sandwiches": ["turkey", "reuben"], "not_sandwiches": "tacos"} + gherkin = [{"a": {"b": "c"}, "d": 0}, [1, 2, 3], "himom"] + key = key_module.Key("ThisKind", 123, app="testing") + + entity = ThisKind( + key=key, + a=42, + c=gherkin, + d=["foo", "bar", "baz"], + e=[gherkin, dill], + ) + + entity_pb = model._entity_to_protobuf(entity) + assert isinstance(entity_pb, ds_types.Entity) + assert entity_pb.properties["a"].integer_value == 42 + assert entity_pb.properties["b"].null_value == 0 + assert pickle.loads(entity_pb.properties["c"].blob_value) == gherkin + d_values = entity_pb.properties["d"].array_value.values + assert d_values[0].string_value == "foo" + assert d_values[1].string_value == "bar" + assert d_values[2].string_value == "baz" + e_values = entity_pb.properties["e"].array_value.values + assert pickle.loads(e_values[0].blob_value) == gherkin + assert pickle.loads(e_values[1].blob_value) == dill + assert "__key__" not in entity_pb.properties + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_property_named_key(): + class ThisKind(model.Model): + key = model.StringProperty() + + key = key_module.Key("ThisKind", 123, app="testing") + entity = ThisKind(key="not the key", _key=key) + + entity_pb = model._entity_to_protobuf(entity) + assert entity_pb.properties["key"].string_value == "not the key" + assert entity_pb.key.path[0].kind == "ThisKind" + assert entity_pb.key.path[0].id == 123 + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_override_property(): + class ThatKind(model.Model): + a = model.StringProperty() + + class ThisKind(ThatKind): + a = model.IntegerProperty() + b = model.BooleanProperty() + c = model.PickleProperty() + d = model.StringProperty(repeated=True) + e = model.PickleProperty(repeated=True) + notaproperty = True + + dill = {"sandwiches": ["turkey", "reuben"], "not_sandwiches": "tacos"} + gherkin = [{"a": {"b": "c"}, "d": 0}, [1, 2, 3], "himom"] + key = key_module.Key("ThisKind", 123, app="testing") + + entity = ThisKind( + key=key, + a=42, + c=gherkin, + d=["foo", "bar", "baz"], + e=[gherkin, dill], + ) + + entity_pb = model._entity_to_protobuf(entity) + assert isinstance(entity_pb, ds_types.Entity) + assert entity_pb.properties["a"].integer_value == 42 + assert entity_pb.properties["b"].null_value == 0 + assert pickle.loads(entity_pb.properties["c"].blob_value) == gherkin + d_values = entity_pb.properties["d"].array_value.values + assert d_values[0].string_value == "foo" + assert d_values[1].string_value == "bar" + assert d_values[2].string_value == "baz" + e_values = entity_pb.properties["e"].array_value.values + assert pickle.loads(e_values[0].blob_value) == gherkin + assert pickle.loads(e_values[1].blob_value) == dill + assert "__key__" not in entity_pb.properties + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_uninitialized_property(): + class ThisKind(model.Model): + foo = model.StringProperty(required=True) + + entity = ThisKind() + + with pytest.raises(exceptions.BadValueError): + model._entity_to_protobuf(entity) + + +class TestExpando: + @staticmethod + def test_constructor(): + class 
Expansive(model.Expando): + foo = model.StringProperty() + + expansive = Expansive(foo="x", bar="y", baz="z") + assert expansive._properties == {"foo": "x", "bar": "y", "baz": "z"} + # Make sure we didn't change properties for the class + assert Expansive._properties == {"foo": "foo"} + + @staticmethod + def test___getattr__(): + class Expansive(model.Expando): + foo = model.StringProperty() + + expansive = Expansive(foo="x", bar="y", baz="z") + assert expansive.bar == "y" + + @staticmethod + def test___getattr__from_model(): + class Expansive(model.Expando): + foo = model.StringProperty() + + expansive = Expansive(foo="x", bar="y", baz="z") + assert expansive._default_filters() == () + + @staticmethod + def test___getattr__from_model_error(): + class Expansive(model.Expando): + foo = model.StringProperty() + + expansive = Expansive(foo="x", bar="y", baz="z") + with pytest.raises(AttributeError): + expansive.notaproperty + + @staticmethod + def test___setattr__with_model(): + class Expansive(model.Expando): + foo = model.StringProperty() + + expansive = Expansive(foo="x", bar=model.Model()) + assert isinstance(expansive.bar, model.Model) + + @staticmethod + def test___setattr__with_dict(): + class Expansive(model.Expando): + foo = model.StringProperty() + + expansive = Expansive(foo="x", bar={"bar": "y", "baz": "z"}) + assert expansive.bar.baz == "z" + + @staticmethod + def test___setattr__with_dotted_name(): + """Regression test for issue #673 + + https://github.com/googleapis/python-ndb/issues/673 + """ + + class Expansive(model.Expando): + foo = model.StringProperty() + + expansive = Expansive(foo="x") + setattr(expansive, "a.b", "one") + assert expansive.a.b == "one" + + setattr(expansive, "a.c", "two") + assert expansive.a.b == "one" + assert expansive.a.c == "two" + + @staticmethod + def test___delattr__(): + class Expansive(model.Expando): + foo = model.StringProperty() + + expansive = Expansive(foo="x") + expansive.baz = "y" + assert expansive._properties == {"foo": "x", "baz": "y"} + del expansive.baz + assert expansive._properties == {"foo": "x"} + + @staticmethod + def test___delattr__from_model(): + class Expansive(model.Expando): + foo = model.StringProperty() + + expansive = Expansive(foo="x") + with pytest.raises(AttributeError): + del expansive._nnexistent + + @staticmethod + def test___delattr__non_property(): + class Expansive(model.Expando): + foo = model.StringProperty() + + expansive = Expansive(foo="x") + expansive.baz = "y" + expansive._properties["baz"] = "Not a Property" + with pytest.raises(TypeError): + del expansive.baz + + @staticmethod + def test___delattr__runtime_error(): + class Expansive(model.Expando): + foo = model.StringProperty() + + expansive = Expansive(foo="x") + expansive.baz = "y" + model.Model._properties["baz"] = "baz" + with pytest.raises(RuntimeError): + del expansive.baz + + +class Test__legacy_db_get_value: + @staticmethod + def test_str_blobkey(): + prop = model.Property() + p = _legacy_entity_pb.Property() + p.set_meaning(_legacy_entity_pb.Property.BLOBKEY) + v = _legacy_entity_pb.PropertyValue() + v.set_stringvalue(b"foo") + assert prop._legacy_db_get_value(v, p) == model.BlobKey(b"foo") + + @staticmethod + def test_str_blob(): + prop = model.Property() + p = _legacy_entity_pb.Property() + p.set_meaning(_legacy_entity_pb.Property.BLOB) + v = _legacy_entity_pb.PropertyValue() + v.set_stringvalue(b"foo") + assert prop._legacy_db_get_value(v, p) == b"foo" + + @staticmethod + def test_str_blob_compressed(): + prop = model.Property() + p = 
_legacy_entity_pb.Property() + p.set_meaning(_legacy_entity_pb.Property.BLOB) + p.set_meaning_uri("ZLIB") + v = _legacy_entity_pb.PropertyValue() + v.set_stringvalue(b"foo") + assert prop._legacy_db_get_value(v, p) == b"foo" + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_str_entity_proto(): + prop = model.Property() + p = _legacy_entity_pb.Property() + p.set_meaning(_legacy_entity_pb.Property.ENTITY_PROTO) + v = _legacy_entity_pb.PropertyValue() + v.set_stringvalue(b"\x6a\x03\x6a\x01\x42") + assert isinstance(prop._legacy_db_get_value(v, p), model.Expando) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_str_entity_proto_no_key(): + prop = model.Property() + p = _legacy_entity_pb.Property() + p.set_meaning(_legacy_entity_pb.Property.ENTITY_PROTO) + v = _legacy_entity_pb.PropertyValue() + v.set_stringvalue(b"\x72\x0a\x0b\x12\x01\x44\x18\x01\x22\x01\x45\x0c") + assert isinstance(prop._legacy_db_get_value(v, p), model.Expando) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_str_entity_proto_bad(): + prop = model.Property() + p = _legacy_entity_pb.Property() + p.set_meaning(_legacy_entity_pb.Property.ENTITY_PROTO) + v = _legacy_entity_pb.PropertyValue() + v.set_stringvalue(b"\x6a\x0c\x72\x0a\x0b\x12\x01\x44\x18\x01\x22\x01\x45\x0c") + with pytest.raises(ValueError): + prop._legacy_db_get_value(v, p) + + @staticmethod + def test_str_bytestr_meaning(): + prop = model.Property() + p = _legacy_entity_pb.Property() + p.set_meaning(_legacy_entity_pb.Property.BYTESTRING) + v = _legacy_entity_pb.PropertyValue() + v.set_stringvalue(b"foo") + assert prop._legacy_db_get_value(v, p) == b"foo" + + @staticmethod + def test_str_utf8(): + prop = model.Property() + p = _legacy_entity_pb.Property() + v = _legacy_entity_pb.PropertyValue() + v.has_stringvalue_ = 1 + v.stringvalue_ = bytes("fo\xc3", encoding="utf-8") + assert prop._legacy_db_get_value(v, p) == "fo\xc3" + + @staticmethod + def test_str_decode_error(): + prop = model.Property() + p = _legacy_entity_pb.Property() + v = _legacy_entity_pb.PropertyValue() + v.set_stringvalue(b"\xe9") + assert prop._legacy_db_get_value(v, p) == b"\xe9" + + @staticmethod + def test_int_gd_when(): + prop = model.Property() + p = _legacy_entity_pb.Property() + p.set_meaning(_legacy_entity_pb.Property.GD_WHEN) + v = _legacy_entity_pb.PropertyValue() + v.set_int64value(42) + d = datetime.datetime(1970, 1, 1, 0, 0, 0, 42) + assert prop._legacy_db_get_value(v, p) == d + + @staticmethod + def test_boolean(): + prop = model.Property() + p = _legacy_entity_pb.Property() + v = _legacy_entity_pb.PropertyValue() + v.set_booleanvalue(True) + assert prop._legacy_db_get_value(v, p) is True + + @staticmethod + def test_double(): + prop = model.Property() + p = _legacy_entity_pb.Property() + v = _legacy_entity_pb.PropertyValue() + v.set_doublevalue(3.1415) + assert prop._legacy_db_get_value(v, p) == 3.1415 + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_reference(): + prop = model.Property() + p = _legacy_entity_pb.Property() + v = _legacy_entity_pb.PropertyValue() + r = _legacy_entity_pb.PropertyValue_ReferenceValue() + e = _legacy_entity_pb.PropertyValue_ReferenceValuePathElement() + e.set_type("a") + e.set_id("b") + r.pathelement_ = [e] + r.set_app("c") + v.mutable_referencevalue() + v.referencevalue_ = r + key = key_module.Key("a", "b", app="c", namespace="") + assert prop._legacy_db_get_value(v, p) == key + + @staticmethod + def test_point(): + prop = model.Property() + p = 
_legacy_entity_pb.Property() + v = _legacy_entity_pb.PropertyValue() + r = _legacy_entity_pb.PropertyValue_PointValue() + r.set_x(10) + r.set_y(20) + v.mutable_pointvalue() + v.pointvalue_ = r + assert prop._legacy_db_get_value(v, p) == model.GeoPt(10, 20) + + @staticmethod + def test_user(): + prop = model.Property() + p = _legacy_entity_pb.Property() + v = _legacy_entity_pb.PropertyValue() + u = _legacy_entity_pb.PropertyValue_UserValue() + user = model.User(email="aol@aol.com", _auth_domain="aol.com", _user_id="loa") + u.set_email(b"aol@aol.com") + u.set_auth_domain(b"aol.com") + u.set_obfuscated_gaiaid(b"loa") + v.mutable_uservalue() + v.uservalue_ = u + assert prop._legacy_db_get_value(v, p) == user + + @staticmethod + def test_missing(): + prop = model.Property() + p = _legacy_entity_pb.Property() + v = _legacy_entity_pb.PropertyValue() + assert prop._legacy_db_get_value(v, p) is None + + +class Test__legacy_deserialize: + @staticmethod + def test_empty_list(): + m = model.Model() + prop = model.Property() + p = _legacy_entity_pb.Property() + p.set_meaning(_legacy_entity_pb.Property.EMPTY_LIST) + assert prop._legacy_deserialize(m, p) is None + + @staticmethod + def test_repeated(): + m = model.Model() + prop = model.Property(repeated=True) + p = _legacy_entity_pb.Property() + assert prop._legacy_deserialize(m, p) is None + + @staticmethod + def test_repeated_with_value(): + m = model.Model() + prop = model.Property(repeated=True) + prop._store_value(m, [41]) + p = _legacy_entity_pb.Property() + v = _legacy_entity_pb.PropertyValue() + v.set_int64value(42) + assert prop._legacy_deserialize(m, p) is None + + +class Test__get_property_for: + @staticmethod + def test_depth_bigger_than_parts(): + m = model.Model() + p = _legacy_entity_pb.Property() + p.set_name(b"foo") + assert m._get_property_for(p, depth=5) is None + + @staticmethod + def test_none(): + m = model.Model() + p = _legacy_entity_pb.Property() + p.set_name(b"foo") + assert m._get_property_for(p)._name == "foo" + + +class Test__from_pb: + @staticmethod + def test_not_entity_proto_raises_error(): + m = model.Model() + with pytest.raises(TypeError): + m._from_pb("not a pb") + + @staticmethod + def test_with_key(): + m = model.Model() + pb = _legacy_entity_pb.EntityProto() + key = key_module.Key("a", "b", app="c", database="", namespace="") + ent = m._from_pb(pb, key=key) + assert ent.key == key + + @staticmethod + def test_with_index_meaning(): + m = model.Model() + pb = _legacy_entity_pb.EntityProto() + p = _legacy_entity_pb.Property() + p.set_name(b"foo") + p.set_meaning(_legacy_entity_pb.Property.INDEX_VALUE) + pb.property_ = [p] + ent = m._from_pb(pb) + assert "foo" in ent._projection + + +class Test__fake_property: + @staticmethod + def test_with_clone_properties(): + def clone(): + pass + + m = model.Model() + m._clone_properties = clone + p = _legacy_entity_pb.Property() + p.set_name(b"foo") + fake = m._fake_property(p, "next") + assert fake._name == "next" + + @staticmethod + def test_with_same_name(): + m = model.Model() + p = _legacy_entity_pb.Property() + p.set_name(b"next") + fake = m._fake_property(p, "next") + assert fake._name == "next" + + +@pytest.mark.usefixtures("in_context") +@mock.patch("google.cloud.ndb.key.Key") +@mock.patch("google.cloud.ndb.tasklets.Future") +def test_get_multi(Key, Future): + model1 = model.Model() + future1 = tasklets.Future() + future1.result.return_value = model1 + + key1 = key_module.Key("a", "b", app="c") + key1.get_async.return_value = future1 + + keys = [key1] + assert 
model.get_multi(keys) == [model1] + + +@pytest.mark.usefixtures("in_context") +@mock.patch("google.cloud.ndb.key.Key") +def test_get_multi_async(Key): + future1 = tasklets.Future() + + key1 = key_module.Key("a", "b", app="c") + key1.get_async.return_value = future1 + + keys = [key1] + assert model.get_multi_async(keys) == [future1] + + +@pytest.mark.usefixtures("in_context") +@mock.patch("google.cloud.ndb.model.Model") +def test_put_multi_async(Model): + future1 = tasklets.Future() + + model1 = model.Model() + model1.put_async.return_value = future1 + + models = [model1] + assert model.put_multi_async(models) == [future1] + + +@pytest.mark.usefixtures("in_context") +@mock.patch("google.cloud.ndb.model.Model") +@mock.patch("google.cloud.ndb.tasklets.Future") +def test_put_multi(Model, Future): + key1 = key_module.Key("a", "b", app="c") + future1 = tasklets.Future() + future1.result.return_value = key1 + + model1 = model.Model() + model1.put_async.return_value = future1 + + models = [model1] + assert model.put_multi(models) == [key1] + + +@pytest.mark.usefixtures("in_context") +@mock.patch("google.cloud.ndb.key.Key") +def test_delete_multi_async(Key): + future1 = tasklets.Future() + + key1 = key_module.Key("a", "b", app="c") + key1.delete_async.return_value = future1 + + keys = [key1] + assert model.delete_multi_async(keys) == [future1] + + +@pytest.mark.usefixtures("in_context") +@mock.patch("google.cloud.ndb.key.Key") +@mock.patch("google.cloud.ndb.tasklets.Future") +def test_delete_multi(Key, Future): + future1 = tasklets.Future() + future1.result.return_value = None + + key1 = key_module.Key("a", "b", app="c") + key1.delete_async.return_value = future1 + + keys = [key1] + assert model.delete_multi(keys) == [None] + + +def test_get_indexes_async(): + with pytest.raises(NotImplementedError): + model.get_indexes_async() + + +def test_get_indexes(): + with pytest.raises(NotImplementedError): + model.get_indexes() + + +@pytest.mark.usefixtures("in_context") +def test_serialization(): + # This is needed because pickle can't serialize local objects + global SomeKind, OtherKind + + class OtherKind(model.Model): + foo = model.IntegerProperty() + + @classmethod + def _get_kind(cls): + return "OtherKind" + + class SomeKind(model.Model): + other = model.StructuredProperty(OtherKind) + + @classmethod + def _get_kind(cls): + return "SomeKind" + + entity = SomeKind(other=OtherKind(foo=1, namespace="Test"), namespace="Test") + assert entity.other.key is None or entity.other.key.id() is None + entity = pickle.loads(pickle.dumps(entity)) + assert entity.other.foo == 1 + + +class Test_Keyword_Name: + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_property_named_project(): + class HasProjectProp(model.Model): + project = model.StringProperty() + + has_project_prop = HasProjectProp( + project="the-property", _project="the-ds-project" + ) + assert has_project_prop.project == "the-property" + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_property_named_app(): + class HasAppProp(model.Model): + app = model.StringProperty() + + has_app_prop = HasAppProp(app="the-property", _app="the-gae-app") + assert has_app_prop.app == "the-property" + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_property_named_database(): + class HasDbProp(model.Model): + database = model.StringProperty() + + has_db_prop = HasDbProp(database="the-property", _database="the-ds-database") + assert has_db_prop.database == "the-property" + + @staticmethod + 
@pytest.mark.usefixtures("in_context") + def test_property_named_namespace(): + class HasNamespaceProp(model.Model): + namespace = model.StringProperty() + + has_namespace_prop = HasNamespaceProp( + namespace="the-property", _namespace="the-ds-namespace" + ) + assert has_namespace_prop.namespace == "the-property" + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_property_named_key(): + k = key_module.Key("HasKeyProp", "k") + + class HasKeyProp(model.Model): + key = model.StringProperty() + + has_key_prop = HasKeyProp(key="the-property", _key=k) + assert has_key_prop.key == "the-property" + assert has_key_prop._key == k + + +def ManyFieldsFactory(): + """Model type class factory. + + This indirection makes sure ``Model._kind_map`` isn't mutated at module + scope, since any mutations would be reset by the ``reset_state`` fixture + run for each test. + """ + + class ManyFields(model.Model): + self = model.IntegerProperty() + id = model.StringProperty() + key = model.FloatProperty(repeated=True) + value = model.StringProperty() + unused = model.FloatProperty() + + return ManyFields diff --git a/packages/google-cloud-ndb/tests/unit/test_msgprop.py b/packages/google-cloud-ndb/tests/unit/test_msgprop.py new file mode 100644 index 000000000000..facd48061d5a --- /dev/null +++ b/packages/google-cloud-ndb/tests/unit/test_msgprop.py @@ -0,0 +1,37 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest + +from google.cloud.ndb import msgprop + +from . import utils + + +def test___all__(): + utils.verify___all__(msgprop) + + +class TestEnumProperty: + @staticmethod + def test_constructor(): + with pytest.raises(NotImplementedError): + msgprop.EnumProperty() + + +class TestMessageProperty: + @staticmethod + def test_constructor(): + with pytest.raises(NotImplementedError): + msgprop.MessageProperty() diff --git a/packages/google-cloud-ndb/tests/unit/test_packaging.py b/packages/google-cloud-ndb/tests/unit/test_packaging.py new file mode 100644 index 000000000000..2e7aa97a1c50 --- /dev/null +++ b/packages/google-cloud-ndb/tests/unit/test_packaging.py @@ -0,0 +1,37 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import subprocess +import sys + + +def test_namespace_package_compat(tmp_path): + # The ``google`` namespace package should not be masked + # by the presence of ``google-cloud-ndb``. 
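+    # Technique: drop an unrelated module into a ``google`` directory on PYTHONPATH and import it in a fresh interpreter; the import only succeeds if ``google`` stays a shared namespace package.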
+ google = tmp_path / "google" + google.mkdir() + google.joinpath("othermod.py").write_text("") + env = dict(os.environ, PYTHONPATH=str(tmp_path)) + cmd = [sys.executable, "-m", "google.othermod"] + subprocess.check_call(cmd, env=env) + + # The ``google.cloud`` namespace package should not be masked + # by the presence of ``google-cloud-ndb``. + google_cloud = tmp_path / "google" / "cloud" + google_cloud.mkdir() + google_cloud.joinpath("othermod.py").write_text("") + env = dict(os.environ, PYTHONPATH=str(tmp_path)) + cmd = [sys.executable, "-m", "google.cloud.othermod"] + subprocess.check_call(cmd, env=env) diff --git a/packages/google-cloud-ndb/tests/unit/test_polymodel.py b/packages/google-cloud-ndb/tests/unit/test_polymodel.py new file mode 100644 index 000000000000..d217279b08ff --- /dev/null +++ b/packages/google-cloud-ndb/tests/unit/test_polymodel.py @@ -0,0 +1,116 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from unittest import mock + +import pytest + +from google.cloud import datastore +from google.cloud.datastore import helpers +from google.cloud.ndb import model +from google.cloud.ndb import polymodel +from google.cloud.ndb import query + +from . import utils + + +def test___all__(): + utils.verify___all__(polymodel) + + +class Test_ClassKeyProperty: + @staticmethod + def test_constructor(): + prop = polymodel._ClassKeyProperty() + assert prop._name == polymodel._CLASS_KEY_PROPERTY + + @staticmethod + def test__set_value(): + prop = polymodel._ClassKeyProperty() + with pytest.raises(TypeError): + prop._set_value(None, None) + + @staticmethod + def test__get_value(): + prop = polymodel._ClassKeyProperty() + value = ["test"] + values = {prop._name: value} + entity = mock.Mock( + _projection=(prop._name,), + _values=values, + spec=("_projection", "_values"), + ) + assert value is prop._get_value(entity) + + @staticmethod + def test__prepare_for_put(): + prop = polymodel._ClassKeyProperty() + value = ["test"] + values = {prop._name: value} + entity = mock.Mock( + _projection=(prop._name,), + _values=values, + spec=("_projection", "_values"), + ) + assert prop._prepare_for_put(entity) is None + + +class TestPolyModel: + @staticmethod + def test_constructor(): + model = polymodel.PolyModel() + assert model.__dict__ == {"_values": {}} + + @staticmethod + def test_class_property(): + class Animal(polymodel.PolyModel): + pass + + class Feline(Animal): + pass + + class Cat(Feline): + pass + + cat = Cat() + + assert cat._get_kind() == "Animal" + assert cat.class_ == ["Animal", "Feline", "Cat"] + + @staticmethod + def test_default_filters(): + class Animal(polymodel.PolyModel): + pass + + class Cat(Animal): + pass + + assert Animal._default_filters() == () + assert Cat._default_filters() == (query.FilterNode("class", "=", "Cat"),) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_entity_from_protobuf(): + class Animal(polymodel.PolyModel): + pass + + class Cat(Animal): + pass + + key = datastore.Key("Animal", 123, project="testing") + 
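+        # The "class" property lists the hierarchy root-first; deserialization should resolve to the most derived kind, Cat.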
datastore_entity = datastore.Entity(key=key) + datastore_entity["class"] = ["Animal", "Cat"] + protobuf = helpers.entity_to_protobuf(datastore_entity) + entity = model._entity_from_protobuf(protobuf) + assert isinstance(entity, Cat) diff --git a/packages/google-cloud-ndb/tests/unit/test_query.py b/packages/google-cloud-ndb/tests/unit/test_query.py new file mode 100644 index 000000000000..33b560b42e82 --- /dev/null +++ b/packages/google-cloud-ndb/tests/unit/test_query.py @@ -0,0 +1,2424 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pickle + +from unittest import mock + +import pytest + +from google.cloud.datastore import entity as datastore_entity +from google.cloud.datastore import helpers + +from google.cloud.ndb import _datastore_api +from google.cloud.ndb import _datastore_query +from google.cloud.ndb import exceptions +from google.cloud.ndb import key as key_module +from google.cloud.ndb import model +from google.cloud.ndb import query as query_module +from google.cloud.ndb import tasklets + +from . import utils + + +def test___all__(): + utils.verify___all__(query_module) + + +class TestQueryOptions: + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor(): + options = query_module.QueryOptions(kind="test", project="app") + assert options.kind == "test" + assert options.project == "app" + + @staticmethod + def test_constructor_with_config(): + config = query_module.QueryOptions(kind="other", namespace="config_test") + options = query_module.QueryOptions(config=config, kind="test", project="app") + assert options.kind == "test" + assert options.project == "app" + assert options.database is None + assert options.namespace == "config_test" + + @staticmethod + def test_constructor_with_config_specified_db(): + config = query_module.QueryOptions( + kind="other", namespace="config_test", database="config_test" + ) + options = query_module.QueryOptions(config=config, kind="test", project="app") + assert options.kind == "test" + assert options.project == "app" + assert options.database == "config_test" + assert options.namespace == "config_test" + + @staticmethod + def test_constructor_with_bad_config(): + with pytest.raises(TypeError): + query_module.QueryOptions(config="bad") + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test___repr__(): + representation = "QueryOptions(kind='test', project='app')" + options = query_module.QueryOptions(kind="test", project="app") + assert options.__repr__() == representation + + @staticmethod + def test__eq__(): + options = query_module.QueryOptions(kind="test", project="app") + other = query_module.QueryOptions(kind="test", project="app") + otherother = query_module.QueryOptions(kind="nope", project="noway") + + assert options == other + assert options != otherother + assert options != "foo" + + @staticmethod + def test_copy(): + options = query_module.QueryOptions(kind="test", project="app") + options = options.copy(project="app2", database="bar", namespace="foo") + 
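+        # copy() should override only the fields passed in; kind carries over unchanged from the original options.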
assert options.kind == "test" + assert options.project == "app2" + assert options.database == "bar" + assert options.namespace == "foo" + + @staticmethod + def test_explicitly_set_default_database(in_context): + with in_context.new().use() as context: + context.client.database = "newdb" + options = query_module.QueryOptions(context=context) + assert options.database == "newdb" + + @staticmethod + def test_explicitly_set_default_namespace(in_context): + with in_context.new(namespace="somethingelse").use() as context: + options = query_module.QueryOptions(context=context, namespace="") + assert options.namespace == "" + + +class TestPropertyOrder: + @staticmethod + def test_constructor(): + order = query_module.PropertyOrder(name="property", reverse=False) + assert order.name == "property" + assert order.reverse is False + + @staticmethod + def test___repr__(): + representation = "PropertyOrder(name='property', reverse=False)" + order = query_module.PropertyOrder(name="property", reverse=False) + assert order.__repr__() == representation + + @staticmethod + def test___neg__ascending(): + order = query_module.PropertyOrder(name="property", reverse=False) + assert order.reverse is False + new_order = -order + assert new_order.reverse is True + + @staticmethod + def test___neg__descending(): + order = query_module.PropertyOrder(name="property", reverse=True) + assert order.reverse is True + new_order = -order + assert new_order.reverse is False + + +class TestRepeatedStructuredPropertyPredicate: + @staticmethod + def test_constructor(): + predicate = query_module.RepeatedStructuredPropertyPredicate( + "matilda", + ["foo", "bar", "baz"], + mock.Mock(properties={"foo": "a", "bar": "b", "baz": "c"}), + ) + assert predicate.name == "matilda" + assert predicate.match_keys == ["foo", "bar", "baz"] + assert predicate.match_values == ["a", "b", "c"] + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test___call__(): + class SubKind(model.Model): + bar = model.IntegerProperty() + baz = model.StringProperty() + + class SomeKind(model.Model): + foo = model.StructuredProperty(SubKind, repeated=True) + + match_entity = SubKind(bar=1, baz="scoggs") + predicate = query_module.RepeatedStructuredPropertyPredicate( + "foo", ["bar", "baz"], model._entity_to_protobuf(match_entity) + ) + + entity = SomeKind( + foo=[SubKind(bar=2, baz="matic"), SubKind(bar=1, baz="scoggs")] + ) + + assert predicate(model._entity_to_protobuf(entity)) is True + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test___call__no_match(): + class SubKind(model.Model): + bar = model.IntegerProperty() + baz = model.StringProperty() + + class SomeKind(model.Model): + foo = model.StructuredProperty(SubKind, repeated=True) + + match_entity = SubKind(bar=1, baz="scoggs") + predicate = query_module.RepeatedStructuredPropertyPredicate( + "foo", ["bar", "baz"], model._entity_to_protobuf(match_entity) + ) + + entity = SomeKind( + foo=[SubKind(bar=1, baz="matic"), SubKind(bar=2, baz="scoggs")] + ) + + assert predicate(model._entity_to_protobuf(entity)) is False + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test___call__legacy(): + class SubKind(model.Model): + bar = model.IntegerProperty() + baz = model.StringProperty() + + class SomeKind(model.Model): + foo = model.StructuredProperty(SubKind, repeated=True) + + match_entity = SubKind(bar=1, baz="scoggs") + predicate = query_module.RepeatedStructuredPropertyPredicate( + "foo", ["bar", "baz"], model._entity_to_protobuf(match_entity) + ) + + ds_key = 
key_module.Key("SomeKind", None)._key + ds_entity = datastore_entity.Entity(ds_key) + ds_entity.update( + { + "something.else": "whocares", + "foo.bar": [2, 1], + "foo.baz": ["matic", "scoggs"], + } + ) + + assert predicate(helpers.entity_to_protobuf(ds_entity)) is True + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test___call__no_subentities(): + class SubKind(model.Model): + bar = model.IntegerProperty() + baz = model.StringProperty() + + class SomeKind(model.Model): + foo = model.StructuredProperty(SubKind, repeated=True) + + match_entity = SubKind(bar=1, baz="scoggs") + predicate = query_module.RepeatedStructuredPropertyPredicate( + "foo", ["bar", "baz"], model._entity_to_protobuf(match_entity) + ) + + ds_key = key_module.Key("SomeKind", None)._key + ds_entity = datastore_entity.Entity(ds_key) + ds_entity.update({"something.else": "whocares"}) + + assert predicate(helpers.entity_to_protobuf(ds_entity)) is False + + +class TestParameterizedThing: + @staticmethod + def test___eq__(): + thing = query_module.ParameterizedThing() + with pytest.raises(NotImplementedError): + thing == mock.sentinel.other + + @staticmethod + def test___ne__(): + thing = query_module.ParameterizedThing() + with pytest.raises(NotImplementedError): + thing != mock.sentinel.other + + +class TestParameter: + @staticmethod + def test_constructor(): + for key in (88, "def"): + parameter = query_module.Parameter(key) + assert parameter._key == key + + @staticmethod + def test_constructor_invalid(): + with pytest.raises(TypeError): + query_module.Parameter(None) + + @staticmethod + def test___repr__(): + parameter = query_module.Parameter("ghi") + assert repr(parameter) == "Parameter('ghi')" + + @staticmethod + def test___eq__(): + parameter1 = query_module.Parameter("yep") + parameter2 = query_module.Parameter("nope") + parameter3 = mock.sentinel.parameter + assert parameter1 == parameter1 + assert not parameter1 == parameter2 + assert not parameter1 == parameter3 + + @staticmethod + def test___ne__(): + parameter1 = query_module.Parameter("yep") + parameter2 = query_module.Parameter("nope") + parameter3 = mock.sentinel.parameter + assert not parameter1 != parameter1 + assert parameter1 != parameter2 + assert parameter1 != parameter3 + + @staticmethod + def test_key(): + parameter = query_module.Parameter(9000) + assert parameter.key == 9000 + + @staticmethod + def test_resolve(): + key = 9000 + bound_value = "resoolt" + parameter = query_module.Parameter(key) + used = {} + result = parameter.resolve({key: bound_value}, used) + assert result == bound_value + assert used == {key: True} + + @staticmethod + def test_resolve_missing_key(): + parameter = query_module.Parameter(9000) + used = {} + with pytest.raises(exceptions.BadArgumentError): + parameter.resolve({}, used) + + assert used == {} + + +class TestParameterizedFunction: + @staticmethod + def test_constructor(): + query = query_module.ParameterizedFunction("user", [query_module.Parameter(1)]) + assert query.func == "user" + assert query.values == [query_module.Parameter(1)] + + @staticmethod + def test_constructor_bad_function(): + with pytest.raises(ValueError): + query_module.ParameterizedFunction("notafunc", ()) + + @staticmethod + def test___repr__(): + query = query_module.ParameterizedFunction("user", [query_module.Parameter(1)]) + assert query.__repr__() == "ParameterizedFunction('user', [Parameter(1)])" + + @staticmethod + def test___eq__parameter(): + query = query_module.ParameterizedFunction("user", [query_module.Parameter(1)]) + 
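+        # Two ParameterizedFunctions built from the same function name and arguments should compare equal.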
assert ( + query.__eq__( + query_module.ParameterizedFunction("user", [query_module.Parameter(1)]) + ) + is True + ) + + @staticmethod + def test___eq__no_parameter(): + query = query_module.ParameterizedFunction("user", [query_module.Parameter(1)]) + assert query.__eq__(42) is NotImplemented + + @staticmethod + def test_is_parameterized_True(): + query = query_module.ParameterizedFunction("user", [query_module.Parameter(1)]) + assert query.is_parameterized() + + @staticmethod + def test_is_parameterized_False(): + query = query_module.ParameterizedFunction("user", [1]) + assert not query.is_parameterized() + + @staticmethod + def test_is_parameterized_no_arguments(): + query = query_module.ParameterizedFunction("user", ()) + assert not query.is_parameterized() + + @staticmethod + def test_resolve(): + query = query_module.ParameterizedFunction( + "list", [1, query_module.Parameter(2), query_module.Parameter(3)] + ) + used = {} + resolved = query.resolve({2: 4, 3: 6}, used) + assert resolved == [1, 4, 6] + assert used == {2: True, 3: True} + + +class TestNode: + @staticmethod + def test_constructor(): + with pytest.raises(TypeError): + query_module.Node() + + @staticmethod + def _make_one(): + # Bypass the intentionally broken constructor. + node = object.__new__(query_module.Node) + assert isinstance(node, query_module.Node) + return node + + def test___eq__(self): + node = self._make_one() + with pytest.raises(NotImplementedError): + node == mock.sentinel.other + + def test___ne__(self): + node = self._make_one() + with pytest.raises(NotImplementedError): + node != mock.sentinel.no_node + + def test___le__(self): + node = self._make_one() + with pytest.raises(TypeError) as exc_info: + node <= None + + assert exc_info.value.args == ("Nodes cannot be ordered",) + + def test___lt__(self): + node = self._make_one() + with pytest.raises(TypeError) as exc_info: + node < None + + assert exc_info.value.args == ("Nodes cannot be ordered",) + + def test___ge__(self): + node = self._make_one() + with pytest.raises(TypeError) as exc_info: + node >= None + + assert exc_info.value.args == ("Nodes cannot be ordered",) + + def test___gt__(self): + node = self._make_one() + with pytest.raises(TypeError) as exc_info: + node > None + + assert exc_info.value.args == ("Nodes cannot be ordered",) + + def test__to_filter(self): + node = self._make_one() + with pytest.raises(NotImplementedError): + node._to_filter() + + def test__post_filters(self): + node = self._make_one() + assert node._post_filters() is None + + def test_resolve(self): + node = self._make_one() + used = {} + assert node.resolve({}, used) is node + assert used == {} + + +class TestFalseNode: + @staticmethod + def test___eq__(): + false_node1 = query_module.FalseNode() + false_node2 = query_module.FalseNode() + false_node3 = mock.sentinel.false_node + assert false_node1 == false_node1 + assert false_node1 == false_node2 + assert not false_node1 == false_node3 + + @staticmethod + def test___ne__(): + false_node1 = query_module.FalseNode() + false_node2 = query_module.FalseNode() + false_node3 = mock.sentinel.false_node + assert not false_node1 != false_node1 + assert not false_node1 != false_node2 + assert false_node1 != false_node3 + + @staticmethod + def test__to_filter(): + false_node = query_module.FalseNode() + with pytest.raises(exceptions.BadQueryError): + false_node._to_filter() + + @staticmethod + def test__to_filter_post(): + false_node = query_module.FalseNode() + assert false_node._to_filter(post=True) is None + + +class 
TestParameterNode: + @staticmethod + def test_constructor(): + prop = model.Property(name="val") + param = query_module.Parameter("abc") + parameter_node = query_module.ParameterNode(prop, "=", param) + assert parameter_node._prop is prop + assert parameter_node._op == "=" + assert parameter_node._param is param + + @staticmethod + def test_constructor_bad_property(): + param = query_module.Parameter(11) + with pytest.raises(TypeError): + query_module.ParameterNode(None, "!=", param) + + @staticmethod + def test_constructor_bad_op(): + prop = model.Property(name="guitar") + param = query_module.Parameter("pick") + with pytest.raises(TypeError): + query_module.ParameterNode(prop, "less", param) + + @staticmethod + def test_constructor_bad_param(): + prop = model.Property(name="california") + with pytest.raises(TypeError): + query_module.ParameterNode(prop, "<", None) + + @staticmethod + def test_pickling(): + prop = model.Property(name="val") + param = query_module.Parameter("abc") + parameter_node = query_module.ParameterNode(prop, "=", param) + + pickled = pickle.dumps(parameter_node, pickle.HIGHEST_PROTOCOL) + unpickled = pickle.loads(pickled) + assert parameter_node == unpickled + + @staticmethod + def test___repr__(): + prop = model.Property(name="val") + param = query_module.Parameter("abc") + parameter_node = query_module.ParameterNode(prop, "=", param) + + expected = "ParameterNode({!r}, '=', Parameter('abc'))".format(prop) + assert repr(parameter_node) == expected + + @staticmethod + def test___eq__(): + prop1 = model.Property(name="val") + param1 = query_module.Parameter("abc") + parameter_node1 = query_module.ParameterNode(prop1, "=", param1) + prop2 = model.Property(name="ue") + parameter_node2 = query_module.ParameterNode(prop2, "=", param1) + parameter_node3 = query_module.ParameterNode(prop1, "<", param1) + param2 = query_module.Parameter(900) + parameter_node4 = query_module.ParameterNode(prop1, "=", param2) + parameter_node5 = mock.sentinel.parameter_node + + assert parameter_node1 == parameter_node1 + assert not parameter_node1 == parameter_node2 + assert not parameter_node1 == parameter_node3 + assert not parameter_node1 == parameter_node4 + assert not parameter_node1 == parameter_node5 + + @staticmethod + def test___ne__(): + prop1 = model.Property(name="val") + param1 = query_module.Parameter("abc") + parameter_node1 = query_module.ParameterNode(prop1, "=", param1) + prop2 = model.Property(name="ue") + parameter_node2 = query_module.ParameterNode(prop2, "=", param1) + parameter_node3 = query_module.ParameterNode(prop1, "<", param1) + param2 = query_module.Parameter(900) + parameter_node4 = query_module.ParameterNode(prop1, "=", param2) + parameter_node5 = mock.sentinel.parameter_node + + assert not parameter_node1 != parameter_node1 + assert parameter_node1 != parameter_node2 + assert parameter_node1 != parameter_node3 + assert parameter_node1 != parameter_node4 + assert parameter_node1 != parameter_node5 + + @staticmethod + def test__to_filter(): + prop = model.Property(name="val") + param = query_module.Parameter("abc") + parameter_node = query_module.ParameterNode(prop, "=", param) + with pytest.raises(exceptions.BadArgumentError): + parameter_node._to_filter() + + @staticmethod + def test_resolve_simple(): + prop = model.Property(name="val") + param = query_module.Parameter("abc") + parameter_node = query_module.ParameterNode(prop, "=", param) + + value = 67 + bindings = {"abc": value} + used = {} + resolved_node = parameter_node.resolve(bindings, used) + + assert 
resolved_node == query_module.FilterNode("val", "=", value) + assert used == {"abc": True} + + @staticmethod + def test_resolve_with_in(): + prop = model.Property(name="val") + param = query_module.Parameter("replace") + parameter_node = query_module.ParameterNode(prop, "in", param) + + value = (19, 20, 28) + bindings = {"replace": value} + used = {} + resolved_node = parameter_node.resolve(bindings, used) + + assert resolved_node == query_module.DisjunctionNode( + query_module.FilterNode("val", "=", 19), + query_module.FilterNode("val", "=", 20), + query_module.FilterNode("val", "=", 28), + ) + assert used == {"replace": True} + + @staticmethod + def test_resolve_in_empty_container(): + prop = model.Property(name="val") + param = query_module.Parameter("replace") + parameter_node = query_module.ParameterNode(prop, "in", param) + + value = () + bindings = {"replace": value} + used = {} + resolved_node = parameter_node.resolve(bindings, used) + + assert resolved_node == query_module.FalseNode() + assert used == {"replace": True} + + +class TestFilterNode: + @staticmethod + def test_constructor(): + filter_node = query_module.FilterNode("a", ">", 9) + assert filter_node._name == "a" + assert filter_node._opsymbol == ">" + assert filter_node._value == 9 + + @staticmethod + def test_constructor_with_key(): + key = key_module.Key("a", "b", app="c", namespace="d", database="db") + filter_node = query_module.FilterNode("name", "=", key) + assert filter_node._name == "name" + assert filter_node._opsymbol == "=" + assert filter_node._value is key._key + + @staticmethod + def test_constructor_in(): + or_node = query_module.FilterNode("a", "in", ("x", "y", "z")) + + filter_node1 = query_module.FilterNode("a", "=", "x") + filter_node2 = query_module.FilterNode("a", "=", "y") + filter_node3 = query_module.FilterNode("a", "=", "z") + assert or_node == query_module.DisjunctionNode( + filter_node1, filter_node2, filter_node3 + ) + + @staticmethod + def test_constructor_in_single(): + filter_node = query_module.FilterNode("a", "in", [9000]) + assert isinstance(filter_node, query_module.FilterNode) + assert filter_node._name == "a" + assert filter_node._opsymbol == "=" + assert filter_node._value == 9000 + + @staticmethod + def test_constructor_in_empty(): + filter_node = query_module.FilterNode("a", "in", set()) + assert isinstance(filter_node, query_module.FalseNode) + + @staticmethod + def test_constructor_in_invalid_container(): + with pytest.raises(TypeError): + query_module.FilterNode("a", "in", {}) + + @staticmethod + def test_constructor_ne(): + ne_node = query_module.FilterNode("a", "!=", 2.5) + + filter_node1 = query_module.FilterNode("a", "<", 2.5) + filter_node2 = query_module.FilterNode("a", ">", 2.5) + assert ne_node != query_module.DisjunctionNode(filter_node1, filter_node2) + assert ne_node._value == 2.5 + assert ne_node._opsymbol == "!=" + assert ne_node._name == "a" + + @staticmethod + def test_pickling(): + filter_node = query_module.FilterNode("speed", ">=", 88) + + pickled = pickle.dumps(filter_node, pickle.HIGHEST_PROTOCOL) + unpickled = pickle.loads(pickled) + assert filter_node == unpickled + + @staticmethod + def test___repr__(): + filter_node = query_module.FilterNode("speed", ">=", 88) + assert repr(filter_node) == "FilterNode('speed', '>=', 88)" + + @staticmethod + def test___eq__(): + filter_node1 = query_module.FilterNode("speed", ">=", 88) + filter_node2 = query_module.FilterNode("slow", ">=", 88) + filter_node3 = query_module.FilterNode("speed", "<=", 88) + filter_node4 = 
query_module.FilterNode("speed", ">=", 188) + filter_node5 = mock.sentinel.filter_node + assert filter_node1 == filter_node1 + assert not filter_node1 == filter_node2 + assert not filter_node1 == filter_node3 + assert not filter_node1 == filter_node4 + assert not filter_node1 == filter_node5 + + @staticmethod + def test__to_filter_post(): + filter_node = query_module.FilterNode("speed", ">=", 88) + assert filter_node._to_filter(post=True) is None + + @staticmethod + def test__to_ne_filter_op(): + filter_node = query_module.FilterNode("speed", "!=", 88) + assert filter_node._to_filter(post=True) is None + + @staticmethod + @mock.patch("google.cloud.ndb._datastore_query") + def test__to_filter(_datastore_query): + as_filter = _datastore_query.make_filter.return_value + filter_node = query_module.FilterNode("speed", ">=", 88) + assert filter_node._to_filter() is as_filter + _datastore_query.make_filter.assert_called_once_with("speed", ">=", 88) + + +class TestPostFilterNode: + @staticmethod + def test_constructor(): + predicate = mock.sentinel.predicate + post_filter_node = query_module.PostFilterNode(predicate) + assert post_filter_node.predicate is predicate + + @staticmethod + def test_pickling(): + predicate = "must-be-pickle-able" + post_filter_node = query_module.PostFilterNode(predicate) + + pickled = pickle.dumps(post_filter_node, pickle.HIGHEST_PROTOCOL) + unpickled = pickle.loads(pickled) + assert post_filter_node == unpickled + + @staticmethod + def test___repr__(): + predicate = "predicate-not-repr" + post_filter_node = query_module.PostFilterNode(predicate) + assert repr(post_filter_node) == "PostFilterNode(predicate-not-repr)" + + @staticmethod + def test___eq__(): + predicate1 = mock.sentinel.predicate1 + post_filter_node1 = query_module.PostFilterNode(predicate1) + predicate2 = mock.sentinel.predicate2 + post_filter_node2 = query_module.PostFilterNode(predicate2) + post_filter_node3 = mock.sentinel.post_filter_node + assert post_filter_node1 == post_filter_node1 + assert not post_filter_node1 == post_filter_node2 + assert not post_filter_node1 == post_filter_node3 + + @staticmethod + def test___ne__(): + predicate1 = mock.sentinel.predicate1 + post_filter_node1 = query_module.PostFilterNode(predicate1) + predicate2 = mock.sentinel.predicate2 + post_filter_node2 = query_module.PostFilterNode(predicate2) + post_filter_node3 = mock.sentinel.post_filter_node + assert not post_filter_node1 != post_filter_node1 + assert post_filter_node1 != post_filter_node2 + assert post_filter_node1 != post_filter_node3 + + @staticmethod + def test__to_filter_post(): + predicate = mock.sentinel.predicate + post_filter_node = query_module.PostFilterNode(predicate) + assert post_filter_node._to_filter(post=True) is predicate + + @staticmethod + def test__to_filter(): + predicate = mock.sentinel.predicate + post_filter_node = query_module.PostFilterNode(predicate) + assert post_filter_node._to_filter() is None + + +class Test_BooleanClauses: + @staticmethod + def test_constructor_or(): + or_clauses = query_module._BooleanClauses("name", True) + assert or_clauses.name == "name" + assert or_clauses.combine_or + assert or_clauses.or_parts == [] + + @staticmethod + def test_constructor_and(): + and_clauses = query_module._BooleanClauses("name", False) + assert and_clauses.name == "name" + assert not and_clauses.combine_or + assert and_clauses.or_parts == [[]] + + @staticmethod + def test_add_node_invalid(): + clauses = query_module._BooleanClauses("name", False) + with pytest.raises(TypeError): + 
clauses.add_node(None) + + @staticmethod + def test_add_node_or_with_simple(): + clauses = query_module._BooleanClauses("name", True) + node = query_module.FilterNode("a", "=", 7) + clauses.add_node(node) + assert clauses.or_parts == [node] + + @staticmethod + def test_add_node_or_with_disjunction(): + clauses = query_module._BooleanClauses("name", True) + node1 = query_module.FilterNode("a", "=", 7) + node2 = query_module.FilterNode("b", ">", 7.5) + node3 = query_module.DisjunctionNode(node1, node2) + clauses.add_node(node3) + assert clauses.or_parts == [node1, node2] + + @staticmethod + def test_add_node_and_with_simple(): + clauses = query_module._BooleanClauses("name", False) + node1 = query_module.FilterNode("a", "=", 7) + node2 = query_module.FilterNode("b", ">", 7.5) + node3 = query_module.FilterNode("c", "<", "now") + # Modify to see the "broadcast" + clauses.or_parts = [[node1], [node2], [node3]] + + node4 = query_module.FilterNode("d", ">=", 80) + clauses.add_node(node4) + assert clauses.or_parts == [ + [node1, node4], + [node2, node4], + [node3, node4], + ] + + @staticmethod + def test_add_node_and_with_conjunction(): + clauses = query_module._BooleanClauses("name", False) + node1 = query_module.FilterNode("a", "=", 7) + node2 = query_module.FilterNode("b", ">", 7.5) + clauses.or_parts = [[node1], [node2]] # Modify to see the "broadcast" + + node3 = query_module.FilterNode("c", "<", "now") + node4 = query_module.FilterNode("d", ">=", 80) + node5 = query_module.ConjunctionNode(node3, node4) + clauses.add_node(node5) + assert clauses.or_parts == [ + [node1, node3, node4], + [node2, node3, node4], + ] + + @staticmethod + def test_add_node_and_with_disjunction(): + clauses = query_module._BooleanClauses("name", False) + node1 = query_module.FilterNode("a", "=", 7) + node2 = query_module.FilterNode("b", ">", 7.5) + clauses.or_parts = [[node1], [node2]] # Modify to see the "broadcast" + + node3 = query_module.FilterNode("c", "<", "now") + node4 = query_module.FilterNode("d", ">=", 80) + node5 = query_module.DisjunctionNode(node3, node4) + clauses.add_node(node5) + assert clauses.or_parts == [ + [node1, node3], + [node1, node4], + [node2, node3], + [node2, node4], + ] + + +class TestConjunctionNode: + @staticmethod + def test_constructor_no_nodes(): + with pytest.raises(TypeError): + query_module.ConjunctionNode() + + @staticmethod + def test_constructor_one_node(): + node = query_module.FilterNode("a", "=", 7) + result_node = query_module.ConjunctionNode(node) + assert result_node is node + + @staticmethod + def test_constructor_many_nodes(): + node1 = query_module.FilterNode("a", "=", 7) + node2 = query_module.FilterNode("b", ">", 7.5) + node3 = query_module.FilterNode("c", "<", "now") + node4 = query_module.FilterNode("d", ">=", 80) + + result_node = query_module.ConjunctionNode(node1, node2, node3, node4) + assert isinstance(result_node, query_module.ConjunctionNode) + assert result_node._nodes == [node1, node2, node3, node4] + + @staticmethod + def test_constructor_convert_or(): + node1 = query_module.FilterNode("a", "=", 7) + node2 = query_module.FilterNode("b", ">", 7.5) + node3 = query_module.DisjunctionNode(node1, node2) + node4 = query_module.FilterNode("d", ">=", 80) + + result_node = query_module.ConjunctionNode(node3, node4) + assert isinstance(result_node, query_module.DisjunctionNode) + assert result_node._nodes == [ + query_module.ConjunctionNode(node1, node4), + query_module.ConjunctionNode(node2, node4), + ] + + @staticmethod + 
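+    # Editorial note: the patch below forces _BooleanClauses.or_parts to stay
+    # empty (a state the real helper never produces) purely to cover the
+    # defensive RuntimeError branch in the ConjunctionNode constructor.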
@mock.patch("google.cloud.ndb.query._BooleanClauses") + def test_constructor_unreachable(boolean_clauses): + clauses = mock.Mock(or_parts=[], spec=("add_node", "or_parts")) + boolean_clauses.return_value = clauses + + node1 = query_module.FilterNode("a", "=", 7) + node2 = query_module.FilterNode("b", ">", 7.5) + + with pytest.raises(RuntimeError): + query_module.ConjunctionNode(node1, node2) + + boolean_clauses.assert_called_once_with("ConjunctionNode", combine_or=False) + assert clauses.add_node.call_count == 2 + clauses.add_node.assert_has_calls([mock.call(node1), mock.call(node2)]) + + @staticmethod + def test_pickling(): + node1 = query_module.FilterNode("a", "=", 7) + node2 = query_module.FilterNode("b", ">", 7.5) + and_node = query_module.ConjunctionNode(node1, node2) + + pickled = pickle.dumps(and_node, pickle.HIGHEST_PROTOCOL) + unpickled = pickle.loads(pickled) + assert and_node == unpickled + + @staticmethod + def test___iter__(): + node1 = query_module.FilterNode("a", "=", 7) + node2 = query_module.FilterNode("b", ">", 7.5) + and_node = query_module.ConjunctionNode(node1, node2) + + assert list(and_node) == and_node._nodes + + @staticmethod + def test___repr__(): + node1 = query_module.FilterNode("a", "=", 7) + node2 = query_module.FilterNode("b", ">", 7.5) + and_node = query_module.ConjunctionNode(node1, node2) + expected = "AND(FilterNode('a', '=', 7), FilterNode('b', '>', 7.5))" + assert repr(and_node) == expected + + @staticmethod + def test___eq__(): + filter_node1 = query_module.FilterNode("a", "=", 7) + filter_node2 = query_module.FilterNode("b", ">", 7.5) + filter_node3 = query_module.FilterNode("c", "<", "now") + + and_node1 = query_module.ConjunctionNode(filter_node1, filter_node2) + and_node2 = query_module.ConjunctionNode(filter_node2, filter_node1) + and_node3 = query_module.ConjunctionNode(filter_node1, filter_node3) + and_node4 = mock.sentinel.and_node + + assert and_node1 == and_node1 + assert not and_node1 == and_node2 + assert not and_node1 == and_node3 + assert not and_node1 == and_node4 + + @staticmethod + def test___ne__(): + filter_node1 = query_module.FilterNode("a", "=", 7) + filter_node2 = query_module.FilterNode("b", ">", 7.5) + filter_node3 = query_module.FilterNode("c", "<", "now") + + and_node1 = query_module.ConjunctionNode(filter_node1, filter_node2) + and_node2 = query_module.ConjunctionNode(filter_node2, filter_node1) + and_node3 = query_module.ConjunctionNode(filter_node1, filter_node3) + and_node4 = mock.sentinel.and_node + + assert not and_node1 != and_node1 + assert and_node1 != and_node2 + assert and_node1 != and_node3 + assert and_node1 != and_node4 + + @staticmethod + def test__to_filter_empty(): + node1 = query_module.FilterNode("a", "=", 7) + node2 = query_module.FilterNode("b", "<", 6) + and_node = query_module.ConjunctionNode(node1, node2) + + as_filter = and_node._to_filter(post=True) + assert as_filter is None + + @staticmethod + def test__to_filter_single(): + node1 = mock.Mock(spec=query_module.FilterNode) + node2 = query_module.PostFilterNode("predicate") + node3 = mock.Mock(spec=query_module.FilterNode) + node3._to_filter.return_value = False + and_node = query_module.ConjunctionNode(node1, node2, node3) + + as_filter = and_node._to_filter() + assert as_filter is node1._to_filter.return_value + + node1._to_filter.assert_called_once_with(post=False) + + @staticmethod + @mock.patch("google.cloud.ndb._datastore_query") + def test__to_filter_multiple(_datastore_query): + node1 = mock.Mock(spec=query_module.FilterNode) + node2 = 
query_module.PostFilterNode("predicate") + node3 = mock.Mock(spec=query_module.FilterNode) + and_node = query_module.ConjunctionNode(node1, node2, node3) + + as_filter = _datastore_query.make_composite_and_filter.return_value + assert and_node._to_filter() is as_filter + + _datastore_query.make_composite_and_filter.assert_called_once_with( + [node1._to_filter.return_value, node3._to_filter.return_value] + ) + + @staticmethod + def test__to_filter_multiple_post(): + def predicate_one(entity_pb): + return entity_pb["x"] == 1 + + def predicate_two(entity_pb): + return entity_pb["y"] == 2 + + node1 = query_module.PostFilterNode(predicate_one) + node2 = query_module.PostFilterNode(predicate_two) + and_node = query_module.ConjunctionNode(node1, node2) + + predicate = and_node._to_filter(post=True) + assert predicate({"x": 1, "y": 1}) is False + assert predicate({"x": 1, "y": 2}) is True + assert predicate({"x": 2, "y": 2}) is False + + @staticmethod + def test__post_filters_empty(): + node1 = query_module.FilterNode("a", "=", 7) + node2 = query_module.FilterNode("b", ">", 77) + and_node = query_module.ConjunctionNode(node1, node2) + + post_filters_node = and_node._post_filters() + assert post_filters_node is None + + @staticmethod + def test__post_filters_single(): + node1 = query_module.FilterNode("a", "=", 7) + node2 = query_module.PostFilterNode("predicate2") + and_node = query_module.ConjunctionNode(node1, node2) + + post_filters_node = and_node._post_filters() + assert post_filters_node is node2 + + @staticmethod + def test__post_filters_multiple(): + node1 = query_module.FilterNode("a", "=", 7) + node2 = query_module.PostFilterNode("predicate2") + node3 = query_module.PostFilterNode("predicate3") + and_node = query_module.ConjunctionNode(node1, node2, node3) + + post_filters_node = and_node._post_filters() + assert post_filters_node == query_module.ConjunctionNode(node2, node3) + + @staticmethod + def test__post_filters_same(): + node1 = query_module.PostFilterNode("predicate1") + node2 = query_module.PostFilterNode("predicate2") + and_node = query_module.ConjunctionNode(node1, node2) + + post_filters_node = and_node._post_filters() + assert post_filters_node is and_node + + @staticmethod + def test_resolve(): + node1 = query_module.FilterNode("a", "=", 7) + node2 = query_module.FilterNode("b", ">", 77) + and_node = query_module.ConjunctionNode(node1, node2) + + bindings = {} + used = {} + resolved_node = and_node.resolve(bindings, used) + + assert resolved_node is and_node + assert bindings == {} + assert used == {} + + @staticmethod + def test_resolve_changed(): + node1 = mock.Mock(spec=query_module.FilterNode) + node2 = query_module.FilterNode("b", ">", 77) + node3 = query_module.FilterNode("c", "=", 7) + node1.resolve.return_value = node3 + and_node = query_module.ConjunctionNode(node1, node2) + + bindings = {} + used = {} + resolved_node = and_node.resolve(bindings, used) + + assert isinstance(resolved_node, query_module.ConjunctionNode) + assert resolved_node._nodes == [node3, node2] + assert bindings == {} + assert used == {} + node1.resolve.assert_called_once_with(bindings, used) + + +class TestDisjunctionNode: + @staticmethod + def test_constructor_no_nodes(): + with pytest.raises(TypeError): + query_module.DisjunctionNode() + + @staticmethod + def test_constructor_one_node(): + node = query_module.FilterNode("a", "=", 7) + result_node = query_module.DisjunctionNode(node) + assert result_node is node + + @staticmethod + def test_constructor_many_nodes(): + node1 = 
query_module.FilterNode("a", "=", 7) + node2 = query_module.FilterNode("b", ">", 7.5) + node3 = query_module.FilterNode("c", "<", "now") + node4 = query_module.FilterNode("d", ">=", 80) + + result_node = query_module.DisjunctionNode(node1, node2, node3, node4) + assert isinstance(result_node, query_module.DisjunctionNode) + assert result_node._nodes == [node1, node2, node3, node4] + + @staticmethod + def test_pickling(): + node1 = query_module.FilterNode("a", "=", 7) + node2 = query_module.FilterNode("b", ">", 7.5) + or_node = query_module.DisjunctionNode(node1, node2) + + pickled = pickle.dumps(or_node, pickle.HIGHEST_PROTOCOL) + unpickled = pickle.loads(pickled) + assert or_node == unpickled + + @staticmethod + def test___iter__(): + node1 = query_module.FilterNode("a", "=", 7) + node2 = query_module.FilterNode("b", ">", 7.5) + or_node = query_module.DisjunctionNode(node1, node2) + + assert list(or_node) == or_node._nodes + + @staticmethod + def test___repr__(): + node1 = query_module.FilterNode("a", "=", 7) + node2 = query_module.FilterNode("b", ">", 7.5) + or_node = query_module.DisjunctionNode(node1, node2) + expected = "OR(FilterNode('a', '=', 7), FilterNode('b', '>', 7.5))" + assert repr(or_node) == expected + + @staticmethod + def test___eq__(): + filter_node1 = query_module.FilterNode("a", "=", 7) + filter_node2 = query_module.FilterNode("b", ">", 7.5) + filter_node3 = query_module.FilterNode("c", "<", "now") + + or_node1 = query_module.DisjunctionNode(filter_node1, filter_node2) + or_node2 = query_module.DisjunctionNode(filter_node2, filter_node1) + or_node3 = query_module.DisjunctionNode(filter_node1, filter_node3) + or_node4 = mock.sentinel.or_node + + assert or_node1 == or_node1 + assert not or_node1 == or_node2 + assert not or_node1 == or_node3 + assert not or_node1 == or_node4 + + @staticmethod + def test___ne__(): + filter_node1 = query_module.FilterNode("a", "=", 7) + filter_node2 = query_module.FilterNode("b", ">", 7.5) + filter_node3 = query_module.FilterNode("c", "<", "now") + + or_node1 = query_module.DisjunctionNode(filter_node1, filter_node2) + or_node2 = query_module.DisjunctionNode(filter_node2, filter_node1) + or_node3 = query_module.DisjunctionNode(filter_node1, filter_node3) + or_node4 = mock.sentinel.or_node + + assert not or_node1 != or_node1 + assert or_node1 != or_node2 + assert or_node1 != or_node3 + assert or_node1 != or_node4 + + @staticmethod + def test_resolve(): + node1 = query_module.FilterNode("a", "=", 7) + node2 = query_module.FilterNode("b", ">", 77) + or_node = query_module.DisjunctionNode(node1, node2) + + bindings = {} + used = {} + resolved_node = or_node.resolve(bindings, used) + + assert resolved_node is or_node + assert bindings == {} + assert used == {} + + @staticmethod + def test_resolve_changed(): + node1 = mock.Mock(spec=query_module.FilterNode) + node2 = query_module.FilterNode("b", ">", 77) + node3 = query_module.FilterNode("c", "=", 7) + node1.resolve.return_value = node3 + or_node = query_module.DisjunctionNode(node1, node2) + + bindings = {} + used = {} + resolved_node = or_node.resolve(bindings, used) + + assert isinstance(resolved_node, query_module.DisjunctionNode) + assert resolved_node._nodes == [node3, node2] + assert bindings == {} + assert used == {} + node1.resolve.assert_called_once_with(bindings, used) + + @staticmethod + def test__to_filter_post(): + node1 = mock.Mock(spec=query_module.FilterNode) + node2 = mock.Mock(spec=query_module.FilterNode) + or_node = query_module.DisjunctionNode(node1, node2) + + with 
pytest.raises(NotImplementedError): + or_node._to_filter(post=True) + + +def test_AND(): + assert query_module.AND is query_module.ConjunctionNode + + +def test_OR(): + assert query_module.OR is query_module.DisjunctionNode + + +class TestQuery: + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor(): + query = query_module.Query(kind="Foo") + assert query.kind == "Foo" + assert query.ancestor is None + assert query.filters is None + assert query.order_by is None + + @staticmethod + def test_constructor_app_and_project(): + with pytest.raises(TypeError): + query_module.Query(app="foo", project="bar") + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_with_ancestor_parameterized_function(): + query = query_module.Query( + ancestor=query_module.ParameterizedFunction( + "key", query_module.Parameter(1) + ) + ) + assert query.ancestor == query_module.ParameterizedFunction( + "key", query_module.Parameter(1) + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_with_ancestor_and_project(): + key = key_module.Key("a", "b", app="app") + query = query_module.Query(ancestor=key, project="app") + assert query.project == "app" + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_with_ancestor_and_namespace(): + key = key_module.Key("a", "b", namespace="space") + query = query_module.Query(ancestor=key, namespace="space") + assert query.namespace == "space" + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_with_ancestor_and_default_namespace(): + key = key_module.Key("a", "b", namespace=None) + query = query_module.Query(ancestor=key, namespace="") + assert query.namespace == "" + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_with_ancestor_parameterized_thing(): + query = query_module.Query(ancestor=query_module.ParameterizedThing()) + assert isinstance(query.ancestor, query_module.ParameterizedThing) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query") + def test_constructor_with_class_attribute_projection(_datastore_query): + class Foo(model.Model): + string_attr = model.StringProperty() + + class Bar(model.Model): + bar_attr = model.StructuredProperty(Foo) + + query = Bar.query(projection=[Bar.bar_attr.string_attr]) + + assert query.projection[0] == ("bar_attr.string_attr",)[0] + + query.fetch() + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query") + def test_constructor_with_class_attribute_projection_and_distinct( + _datastore_query, + ): + class Foo(model.Model): + string_attr = model.StringProperty() + + class Bar(model.Model): + bar_attr = model.StructuredProperty(Foo) + + query = Bar.query( + projection=[Bar.bar_attr.string_attr], + distinct_on=[Bar.bar_attr.string_attr], + ) + + assert query.projection[0] == ("bar_attr.string_attr",)[0] + assert query.distinct_on[0] == ("bar_attr.string_attr",)[0] + + query.fetch() + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_with_projection(): + query = query_module.Query(kind="Foo", projection=["X"]) + assert query.projection == ("X",) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb.model.Model._check_properties") + def test_constructor_with_projection_as_property(_check_props): + query = query_module.Query(kind="Foo", projection=[model.Property(name="X")]) + assert query.projection == 
("X",) + _check_props.assert_not_called() + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb.model.Model._check_properties") + def test_constructor_with_projection_as_property_modelclass(_check_props): + class Foo(model.Model): + x = model.IntegerProperty() + + query = query_module.Query(kind="Foo", projection=[model.Property(name="x")]) + assert query.projection == ("x",) + _check_props.assert_called_once_with(["x"]) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_with_distinct_on(): + query = query_module.Query(kind="Foo", distinct_on=["X"]) + assert query.distinct_on == ("X",) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_with_group_by(): + query = query_module.Query(kind="Foo", group_by=["X"]) + assert query.distinct_on == ("X",) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_with_distinct_on_and_group_by(): + with pytest.raises(TypeError): + query_module.Query(distinct_on=[], group_by=[]) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_with_filters(): + query = query_module.Query(filters=query_module.FilterNode("f", None, None)) + assert isinstance(query.filters, query_module.Node) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_with_order_by(): + query = query_module.Query(order_by=[]) + assert query.order_by == [] + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_with_orders(): + query = query_module.Query(orders=[]) + assert query.order_by == [] + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_with_orders_and_order_by(): + with pytest.raises(TypeError): + query_module.Query(orders=[], order_by=[]) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_with_default_options(): + options = query_module.QueryOptions() + query = query_module.Query(default_options=options) + assert query.default_options == options + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_with_bad_default_options(): + with pytest.raises(TypeError): + query_module.Query(default_options="bad") + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_constructor_with_default_options_and_projection(): + options = query_module.QueryOptions(projection=["X"]) + with pytest.raises(TypeError): + query_module.Query(projection=["Y"], default_options=options) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_query_errors(): + with pytest.raises(TypeError): + query_module.Query( + ancestor=query_module.ParameterizedFunction( + "user", query_module.Parameter(1) + ) + ) + with pytest.raises(TypeError): + query_module.Query(ancestor=42) + with pytest.raises(ValueError): + query_module.Query(ancestor=model.Key("Kind", None)) + with pytest.raises(TypeError): + query_module.Query(ancestor=model.Key("Kind", 1), app="another") + with pytest.raises(TypeError): + query_module.Query(ancestor=model.Key("X", 1), namespace="another") + with pytest.raises(TypeError): + query_module.Query(filters=42) + with pytest.raises(TypeError): + query_module.Query(order_by=42) + with pytest.raises(TypeError): + query_module.Query(projection="") + with pytest.raises(TypeError): + query_module.Query(projection=42) + with pytest.raises(TypeError): + query_module.Query(projection=[42]) + with pytest.raises(TypeError): + query_module.Query(group_by="") + with pytest.raises(TypeError): + 
query_module.Query(group_by=42) + with pytest.raises(TypeError): + query_module.Query(group_by=[]) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test___repr__(): + options = query_module.QueryOptions(kind="Bar") + query = query_module.Query( + kind="Foo", + ancestor=key_module.Key("a", "b", app="app", namespace="space"), + namespace="space", + app="app", + group_by=["X"], + projection=[model.Property(name="x")], + filters=query_module.FilterNode("f", None, None), + default_options=options, + order_by=[], + ) + rep = ( + "Query(project='app', namespace='space', kind='Foo', ancestor=" + "Key('a', 'b', project='app', namespace='space'), filters=" + "FilterNode('f', None, None), order_by=[], projection=['x'], " + "distinct_on=['X'], default_options=QueryOptions(kind='Bar'))" + ) + assert query.__repr__() == rep + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test___repr__no_params(): + query = query_module.Query() + rep = "Query()" + assert query.__repr__() == rep + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test___repr__keys_only(): + query = query_module.Query(keys_only=True) + rep = "Query(keys_only=True)" + assert query.__repr__() == rep + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_bind(): + options = query_module.QueryOptions(kind="Bar") + query = query_module.Query( + kind="Foo", + ancestor=key_module.Key("a", "b", app="app", namespace="space"), + namespace="space", + app="app", + group_by=["X"], + projection=[model.Property(name="x")], + filters=query_module.FilterNode("f", None, None), + default_options=options, + order_by=[], + ) + query2 = query.bind() + assert query2.kind == "Foo" + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_bind_with_parameter_ancestor(): + options = query_module.QueryOptions(kind="Bar") + query = query_module.Query( + kind="Foo", + ancestor=query_module.Parameter("xyz"), + namespace="space", + app="app", + group_by=["X"], + projection=[model.Property(name="x")], + filters=query_module.FilterNode("f", None, None), + default_options=options, + order_by=[], + ) + key = key_module.Key("a", "b", app="app", namespace="space") + query2 = query.bind(xyz=key) + assert query2.kind == "Foo" + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_bind_with_bound_and_unbound(): + options = query_module.QueryOptions(kind="Bar") + query = query_module.Query( + kind="Foo", + ancestor=query_module.Parameter("xyz"), + namespace="space", + app="app", + group_by=["X"], + projection=[model.Property(name="x")], + filters=query_module.FilterNode("f", None, None), + default_options=options, + order_by=[], + ) + with pytest.raises(exceptions.BadArgumentError): + query.bind(42, "xyz", xyz="1") + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_bind_error(): + query = query_module.Query() + with pytest.raises(exceptions.BadArgumentError): + query.bind(42) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_is_distinct_true(context): + query = query_module.Query( + group_by=["X"], projection=[model.Property(name="X")] + ) + assert query.is_distinct is True + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_is_distinct_false(context): + query = query_module.Query( + group_by=["X"], projection=[model.Property(name="y")] + ) + assert query.is_distinct is False + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_filter(context): + query = query_module.Query( + kind="Foo", 
filters=query_module.FilterNode("x", "=", 1) + ) + filters = [ + query_module.FilterNode("y", ">", 0), + query_module.FilterNode("y", "<", 1000), + ] + query = query.filter(*filters) + filters.insert(0, query_module.FilterNode("x", "=", 1)) + assert query.filters == query_module.ConjunctionNode(*filters) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_filter_one_arg(context): + query = query_module.Query(kind="Foo") + filters = (query_module.FilterNode("y", ">", 0),) + query = query.filter(*filters) + assert query.filters == filters[0] + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_filter_no_args(context): + query = query_module.Query( + kind="Foo", filters=query_module.FilterNode("x", "=", 1) + ) + filters = [] + query = query.filter(*filters) + assert query.filters == query_module.FilterNode("x", "=", 1) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_filter_bad_args(context): + query = query_module.Query( + kind="Foo", filters=query_module.FilterNode("x", "=", 1) + ) + filters = ["f"] + with pytest.raises(TypeError): + query.filter(*filters) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_analyze(context): + query = query_module.Query( + kind="Foo", + filters=query_module.FilterNode("x", "=", 1), + ancestor=query_module.Parameter("xyz"), + ) + analysis = query.analyze() + assert analysis == ["xyz"] + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_analyze_no_args(context): + query = query_module.Query(kind="Foo") + analysis = query.analyze() + assert analysis == [] + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_order(context): + prop1 = model.Property(name="prop1") + prop2 = model.Property(name="prop2") + prop3 = model.Property(name="prop3") + prop4 = model.Property(name="prop4") + query = query_module.Query(kind="Foo", order_by=[prop1, -prop2]) + query = query.order(prop3, prop4) + assert len(query.order_by) == 4 + assert query.order_by[0].name == "prop1" + assert query.order_by[0].reverse is False + assert query.order_by[1].name == "prop2" + assert query.order_by[1].reverse is True + assert query.order_by[2].name == "prop3" + assert query.order_by[2].reverse is False + assert query.order_by[3].name == "prop4" + assert query.order_by[3].reverse is False + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_order_mixed(context): + class Foo(model.Model): + prop1 = model.Property(name="prop1") + prop2 = model.Property(name="prop2") + prop3 = model.Property(name="prop3") + prop4 = model.Property(name="prop4") + + query = query_module.Query(kind="Foo", order_by=["prop1", -Foo.prop2]) + query = query.order("-prop3", Foo.prop4) + assert len(query.order_by) == 4 + assert query.order_by[0].name == "prop1" + assert query.order_by[0].reverse is False + assert query.order_by[1].name == "prop2" + assert query.order_by[1].reverse is True + assert query.order_by[2].name == "prop3" + assert query.order_by[2].reverse is True + assert query.order_by[3].name == "prop4" + assert query.order_by[3].reverse is False + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_order_no_initial_order(context): + prop1 = model.Property(name="prop1") + prop2 = model.Property(name="prop2") + query = query_module.Query(kind="Foo") + query = query.order(prop1, -prop2) + assert len(query.order_by) == 2 + assert query.order_by[0].name == "prop1" + assert query.order_by[0].reverse is False + assert query.order_by[1].name == "prop2" + assert 
query.order_by[1].reverse is True + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_order_no_args(context): + prop1 = model.Property(name="prop1") + prop2 = model.Property(name="prop2") + query = query_module.Query(kind="Foo", order_by=[prop1, -prop2]) + query = query.order() + assert len(query.order_by) == 2 + assert query.order_by[0].name == "prop1" + assert query.order_by[0].reverse is False + assert query.order_by[1].name == "prop2" + assert query.order_by[1].reverse is True + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_order_bad_args(context): + query = query_module.Query(kind="Foo") + with pytest.raises(TypeError): + query.order([5, 10]) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query") + def test_fetch_async(_datastore_query): + future = tasklets.Future("fetch") + _datastore_query.fetch.return_value = future + query = query_module.Query() + assert query.fetch_async() is future + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query") + def test_fetch_async_w_project_and_namespace_from_query(_datastore_query): + query = query_module.Query(project="foo", namespace="bar") + response = _datastore_query.fetch.return_value + assert query.fetch_async() is response + _datastore_query.fetch.assert_called_once_with( + query_module.QueryOptions(project="foo", namespace="bar") + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query") + def test_fetch_async_with_keys_only(_datastore_query): + query = query_module.Query() + response = _datastore_query.fetch.return_value + assert query.fetch_async(keys_only=True) is response + _datastore_query.fetch.assert_called_once_with( + query_module.QueryOptions(project="testing", projection=["__key__"]) + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query") + def test_fetch_async_with_keys_only_as_option(_datastore_query): + query = query_module.Query() + options = query_module.QueryOptions(keys_only=True) + response = _datastore_query.fetch.return_value + assert query.fetch_async(options=options) is response + _datastore_query.fetch.assert_called_once_with( + query_module.QueryOptions(project="testing", keys_only=True) + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_fetch_async_with_keys_only_and_projection(): + query = query_module.Query() + with pytest.raises(TypeError): + query.fetch_async(keys_only=True, projection=["foo", "bar"]) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query") + def test_fetch_async_with_projection(_datastore_query): + query = query_module.Query() + response = _datastore_query.fetch.return_value + assert query.fetch_async(projection=("foo", "bar")) is response + _datastore_query.fetch.assert_called_once_with( + query_module.QueryOptions(project="testing", projection=["foo", "bar"]) + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query") + def test_fetch_async_with_projection_with_properties(_datastore_query): + query = query_module.Query() + response = _datastore_query.fetch.return_value + foo = model.IntegerProperty() + foo._name = "foo" + bar = model.IntegerProperty() + bar._name = "bar" + assert query.fetch_async(projection=(foo, bar)) is response + _datastore_query.fetch.assert_called_once_with( + 
query_module.QueryOptions(project="testing", projection=["foo", "bar"]) + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query") + def test_fetch_async_with_projection_from_query(_datastore_query): + query = query_module.Query(projection=("foo", "bar")) + options = query_module.QueryOptions() + response = _datastore_query.fetch.return_value + assert query.fetch_async(options=options) is response + _datastore_query.fetch.assert_called_once_with( + query_module.QueryOptions(project="testing", projection=("foo", "bar")) + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_fetch_async_with_bad_projection(): + query = query_module.Query() + with pytest.raises(TypeError): + query.fetch_async(projection=[45]) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query") + def test_fetch_async_with_offset(_datastore_query): + query = query_module.Query() + response = _datastore_query.fetch.return_value + assert query.fetch_async(offset=20) is response + _datastore_query.fetch.assert_called_once_with( + query_module.QueryOptions(project="testing", offset=20) + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query") + def test_fetch_async_with_limit(_datastore_query): + query = query_module.Query() + response = _datastore_query.fetch.return_value + assert query.fetch_async(limit=20) is response + _datastore_query.fetch.assert_called_once_with( + query_module.QueryOptions(project="testing", limit=20) + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query") + def test_fetch_async_with_limit_as_positional_arg(_datastore_query): + query = query_module.Query() + response = _datastore_query.fetch.return_value + assert query.fetch_async(20) is response + _datastore_query.fetch.assert_called_once_with( + query_module.QueryOptions(project="testing", limit=20) + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_fetch_async_with_limit_twice(): + query = query_module.Query() + with pytest.raises(TypeError): + query.fetch_async(20, limit=10) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_fetch_async_with_batch_size(): + query = query_module.Query() + with pytest.raises(NotImplementedError): + query.fetch_async(batch_size=20) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_fetch_async_with_prefetch_size(): + query = query_module.Query() + with pytest.raises(NotImplementedError): + query.fetch_async(prefetch_size=20) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query") + def test_fetch_async_with_produce_cursors(_datastore_query): + query = query_module.Query() + response = _datastore_query.fetch.return_value + assert query.fetch_async(produce_cursors=True) is response + _datastore_query.fetch.assert_called_once_with( + query_module.QueryOptions(project="testing") + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query") + def test_fetch_async_with_start_cursor(_datastore_query): + query = query_module.Query() + response = _datastore_query.fetch.return_value + assert query.fetch_async(start_cursor="cursor") is response + _datastore_query.fetch.assert_called_once_with( + query_module.QueryOptions(project="testing", start_cursor="cursor") + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + 
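+    # Editorial note: mirrors the start_cursor test above; end_cursor should
+    # be forwarded unchanged in the QueryOptions handed to
+    # _datastore_query.fetch().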
@mock.patch("google.cloud.ndb._datastore_query") + def test_fetch_async_with_end_cursor(_datastore_query): + query = query_module.Query() + response = _datastore_query.fetch.return_value + assert query.fetch_async(end_cursor="cursor") is response + _datastore_query.fetch.assert_called_once_with( + query_module.QueryOptions(project="testing", end_cursor="cursor") + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query") + def test_fetch_async_with_deadline(_datastore_query): + query = query_module.Query() + response = _datastore_query.fetch.return_value + assert query.fetch_async(deadline=20) is response + _datastore_query.fetch.assert_called_once_with( + query_module.QueryOptions(project="testing", timeout=20) + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query") + def test_fetch_async_with_timeout(_datastore_query): + query = query_module.Query() + response = _datastore_query.fetch.return_value + assert query.fetch_async(timeout=20) is response + _datastore_query.fetch.assert_called_once_with( + query_module.QueryOptions(project="testing", timeout=20) + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query") + def test_fetch_async_with_read_policy(_datastore_query): + query = query_module.Query() + response = _datastore_query.fetch.return_value + assert query.fetch_async(read_policy="foo") is response + _datastore_query.fetch.assert_called_once_with( + query_module.QueryOptions(project="testing", read_consistency="foo") + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query") + def test_fetch_async_with_transaction(_datastore_query): + query = query_module.Query() + response = _datastore_query.fetch.return_value + assert query.fetch_async(transaction="foo") is response + _datastore_query.fetch.assert_called_once_with( + query_module.QueryOptions(project="testing", transaction="foo") + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query") + def test_fetch_async_with_tx_and_read_consistency(_datastore_query): + query = query_module.Query() + with pytest.raises(TypeError): + query.fetch_async( + transaction="foo", read_consistency=_datastore_api.EVENTUAL + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query") + def test_fetch_async_with_tx_and_read_policy(_datastore_query): + query = query_module.Query() + with pytest.raises(TypeError): + query.fetch_async(transaction="foo", read_policy=_datastore_api.EVENTUAL) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_fetch_async_with_bogus_argument(): + query = query_module.Query() + with pytest.raises(TypeError): + query.fetch_async(bogus_argument=20) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query") + def test_fetch(_datastore_query): + future = tasklets.Future("fetch") + future.set_result("foo") + _datastore_query.fetch.return_value = future + query = query_module.Query() + assert query.fetch() == "foo" + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query") + def test_fetch_with_limit_as_positional_arg(_datastore_query): + future = tasklets.Future("fetch") + future.set_result("foo") + _datastore_query.fetch.return_value = future + query = query_module.Query() + assert 
query.fetch(20) == "foo" + _datastore_query.fetch.assert_called_once_with( + query_module.QueryOptions(project="testing", limit=20) + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query") + def test_fetch_projection_of_unindexed_property(_datastore_query): + class SomeKind(model.Model): + foo = model.IntegerProperty(indexed=False) + + future = tasklets.Future("fetch") + future.set_result("foo") + _datastore_query.fetch.return_value = future + query = query_module.Query(kind="SomeKind") + with pytest.raises(model.InvalidPropertyError): + query.fetch(projection=["foo"]) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query") + def test_count(_datastore_query): + _datastore_query.count.return_value = utils.future_result(42) + query = query_module.Query() + assert query.count() == 42 + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query") + def test_count_async(_datastore_query): + _datastore_query.count.return_value = utils.future_result(42) + query = query_module.Query() + assert query.count_async().result() == 42 + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_run_to_queue(): + query = query_module.Query() + with pytest.raises(NotImplementedError): + query.run_to_queue("foo", "bar") + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_iter(): + query = query_module.Query() + iterator = query.iter() + assert isinstance(iterator, _datastore_query.QueryIterator) + assert iterator._query == query_module.QueryOptions(project="testing") + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_iter_with_projection(): + query = query_module.Query() + foo = model.IntegerProperty() + foo._name = "foo" + iterator = query.iter(projection=(foo,)) + assert isinstance(iterator, _datastore_query.QueryIterator) + assert iterator._query == query_module.QueryOptions( + project="testing", projection=["foo"] + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test___iter__(): + query = query_module.Query() + iterator = iter(query) + assert isinstance(iterator, _datastore_query.QueryIterator) + assert iterator._query == query_module.QueryOptions(project="testing") + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query") + def test_map(_datastore_query): + class DummyQueryIterator: + def __init__(self, items): + self.items = list(items) + + def has_next_async(self): + return utils.future_result(bool(self.items)) + + def next(self): + return self.items.pop(0) + + _datastore_query.iterate.return_value = DummyQueryIterator(range(5)) + + def callback(result): + return result + 1 + + query = query_module.Query() + assert query.map(callback) == (1, 2, 3, 4, 5) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query") + def test_map_empty_result_set(_datastore_query): + class DummyQueryIterator: + def __init__(self, items): + self.items = list(items) + + def has_next_async(self): + return utils.future_result(bool(self.items)) + + _datastore_query.iterate.return_value = DummyQueryIterator(()) + + def callback(result): # pragma: NO COVER + raise Exception("Shouldn't get called.") + + query = query_module.Query() + assert query.map(callback) == () + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query") + def 
test_map_async(_datastore_query): + class DummyQueryIterator: + def __init__(self, items): + self.items = list(items) + + def has_next_async(self): + return utils.future_result(bool(self.items)) + + def next(self): + return self.items.pop(0) + + _datastore_query.iterate.return_value = DummyQueryIterator(range(5)) + + def callback(result): + return utils.future_result(result + 1) + + query = query_module.Query() + future = query.map_async(callback) + assert future.result() == (1, 2, 3, 4, 5) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_map_pass_batch_into_callback(): + query = query_module.Query() + with pytest.raises(NotImplementedError): + query.map(None, pass_batch_into_callback=True) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_map_merge_future(): + query = query_module.Query() + with pytest.raises(NotImplementedError): + query.map(None, merge_future="hi mom!") + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query") + def test_get(_datastore_query): + query = query_module.Query() + _datastore_query.fetch.return_value = utils.future_result(["foo", "bar"]) + assert query.get() == "foo" + _datastore_query.fetch.assert_called_once_with( + query_module.QueryOptions(project="testing", limit=1) + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query") + def test_get_no_results(_datastore_query): + query = query_module.Query() + _datastore_query.fetch.return_value = utils.future_result([]) + assert query.get() is None + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query") + def test_get_async(_datastore_query): + query = query_module.Query() + _datastore_query.fetch.return_value = utils.future_result(["foo", "bar"]) + future = query.get_async() + assert future.result() == "foo" + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_fetch_page_multiquery(): + query = query_module.Query() + query.filters = mock.Mock(_multiquery=True) + with pytest.raises(TypeError): + query.fetch_page(5) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query") + def test_fetch_page_first_page(_datastore_query): + class DummyQueryIterator: + _more_results_after_limit = True + + def __init__(self): + self.items = list(range(5)) + + def has_next_async(self): + return utils.future_result(bool(self.items)) + + def next(self): + item = self.items.pop(0) + return mock.Mock( + entity=mock.Mock(return_value=item), + cursor="cursor{}".format(item), + ) + + _datastore_query.iterate.return_value = DummyQueryIterator() + query = query_module.Query() + query.filters = mock.Mock( + _multiquery=False, + _post_filters=mock.Mock(return_value=False), + ) + results, cursor, more = query.fetch_page(5) + assert results == [0, 1, 2, 3, 4] + assert cursor == "cursor4" + assert more + + _datastore_query.iterate.assert_called_once_with( + query_module.QueryOptions( + filters=query.filters, + project="testing", + limit=5, + ), + raw=True, + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query") + def test_fetch_page_last_page(_datastore_query): + class DummyQueryIterator: + _more_results_after_limit = False + + def __init__(self): + self.items = list(range(5)) + + def has_next_async(self): + return utils.future_result(bool(self.items)) + + def probably_has_next(self): + return bool(self.items) + + def 
next(self): + item = self.items.pop(0) + return mock.Mock( + entity=mock.Mock(return_value=item), + cursor="cursor{}".format(item), + ) + + _datastore_query.iterate.return_value = DummyQueryIterator() + query = query_module.Query() + results, cursor, more = query.fetch_page(5, start_cursor="cursor000") + assert results == [0, 1, 2, 3, 4] + assert cursor == "cursor4" + assert not more + + _datastore_query.iterate.assert_called_once_with( + query_module.QueryOptions( + project="testing", + limit=5, + start_cursor="cursor000", + ), + raw=True, + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query") + def test_fetch_page_beyond_last_page(_datastore_query): + class DummyQueryIterator: + # Emulates the Datastore emulator behavior + _more_results_after_limit = True + + def __init__(self): + self.items = [] + + def has_next_async(self): + return utils.future_result(False) + + _datastore_query.iterate.return_value = DummyQueryIterator() + query = query_module.Query() + results, cursor, more = query.fetch_page(5, start_cursor="cursor000") + assert results == [] + assert not more + + _datastore_query.iterate.assert_called_once_with( + query_module.QueryOptions( + project="testing", + limit=5, + start_cursor="cursor000", + ), + raw=True, + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query") + def test_fetch_page_no_results(_datastore_query): + class DummyQueryIterator: + _more_results_after_limit = True + + def __init__(self): + self.items = [] + + def has_next_async(self): + return utils.future_result(bool(self.items)) + + _datastore_query.iterate.return_value = DummyQueryIterator() + query = query_module.Query() + query.filters = mock.Mock( + _multiquery=False, + _post_filters=mock.Mock(return_value=False), + ) + results, cursor, more = query.fetch_page(5) + assert results == [] + assert cursor is None + assert more is False + + _datastore_query.iterate.assert_called_once_with( + query_module.QueryOptions( + filters=query.filters, + project="testing", + limit=5, + ), + raw=True, + ) + + @staticmethod + @pytest.mark.usefixtures("in_context") + @mock.patch("google.cloud.ndb._datastore_query") + def test_fetch_page_async(_datastore_query): + class DummyQueryIterator: + _more_results_after_limit = True + + def __init__(self): + self.items = list(range(5)) + + def has_next_async(self): + return utils.future_result(bool(self.items)) + + def next(self): + item = self.items.pop(0) + return mock.Mock( + entity=mock.Mock(return_value=item), + cursor="cursor{}".format(item), + ) + + _datastore_query.iterate.return_value = DummyQueryIterator() + query = query_module.Query() + future = query.fetch_page_async(5) + results, cursor, more = future.result() + assert results == [0, 1, 2, 3, 4] + assert cursor == "cursor4" + assert more + + _datastore_query.iterate.assert_called_once_with( + query_module.QueryOptions(project="testing", limit=5), + raw=True, + ) + + +class TestGQL: + @staticmethod + @pytest.mark.usefixtures("in_context") + @pytest.mark.filterwarnings("ignore") + def test_gql(): + class SomeKind(model.Model): + prop1 = model.StringProperty() + prop2 = model.StringProperty() + prop3 = model.IntegerProperty() + prop4 = model.IntegerProperty() + + rep = ( + "Query(kind='SomeKind', filters=AND(FilterNode('prop2', '=', {}" + "), FilterNode('prop3', '>', 5)), order_by=[PropertyOrder(name=" + "'prop4', reverse=False)], limit=10, offset=5, " + "projection=['prop1', 'prop2'])" + ) + gql_query = ( 
+ "SELECT prop1, prop2 FROM SomeKind WHERE prop3>5 and prop2='xxx' " + "ORDER BY prop4 LIMIT 10 OFFSET 5" + ) + query = query_module.gql(gql_query) + compat_rep = "'xxx'" + assert query.__repr__() == rep.format(compat_rep) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_gql_with_bind_positional(): + class SomeKind(model.Model): + prop1 = model.StringProperty() + prop2 = model.StringProperty() + prop3 = model.IntegerProperty() + prop4 = model.IntegerProperty() + + rep = ( + "Query(kind='SomeKind', filters=AND(FilterNode('prop2', '=', {}" + "), FilterNode('prop3', '>', 5)), order_by=[PropertyOrder(name=" + "'prop4', reverse=False)], limit=10, offset=5, " + "projection=['prop1', 'prop2'])" + ) + gql_query = ( + "SELECT prop1, prop2 FROM SomeKind WHERE prop3>:1 AND prop2=:2 " + "ORDER BY prop4 LIMIT 10 OFFSET 5" + ) + positional = [5, "xxx"] + query = query_module.gql(gql_query, *positional) + compat_rep = "'xxx'" + assert query.__repr__() == rep.format(compat_rep) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_gql_with_bind_keywords(): + class SomeKind(model.Model): + prop1 = model.StringProperty() + prop2 = model.StringProperty() + prop3 = model.IntegerProperty() + prop4 = model.IntegerProperty() + + rep = ( + "Query(kind='SomeKind', filters=AND(FilterNode('prop2', '=', {}" + "), FilterNode('prop3', '>', 5)), order_by=[PropertyOrder(name=" + "'prop4', reverse=False)], limit=10, offset=5, " + "projection=['prop1', 'prop2'])" + ) + gql_query = ( + "SELECT prop1, prop2 FROM SomeKind WHERE prop3 > :param1 and " + "prop2 = :param2 ORDER BY prop4 LIMIT 10 OFFSET 5" + ) + keywords = {"param1": 5, "param2": "xxx"} + query = query_module.gql(gql_query, **keywords) + compat_rep = "'xxx'" + assert query.__repr__() == rep.format(compat_rep) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_gql_with_bind_mixed(): + class SomeKind(model.Model): + prop1 = model.StringProperty() + prop2 = model.StringProperty() + prop3 = model.IntegerProperty() + prop4 = model.IntegerProperty() + + rep = ( + "Query(kind='SomeKind', filters=AND(FilterNode('prop2', '=', {}" + "), FilterNode('prop3', '>', 5)), order_by=[PropertyOrder(name=" + "'prop4', reverse=False)], limit=10, offset=5, " + "projection=['prop1', 'prop2'])" + ) + gql_query = ( + "SELECT prop1, prop2 FROM SomeKind WHERE prop3 > :1 and " + "prop2 = :param1 ORDER BY prop4 LIMIT 10 OFFSET 5" + ) + positional = [5] + keywords = {"param1": "xxx"} + query = query_module.gql(gql_query, *positional, **keywords) + compat_rep = "'xxx'" + assert query.__repr__() == rep.format(compat_rep) + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_gql_with_bind_not_in(): + class SomeKind(model.Model): + prop1 = model.StringProperty() + + query = query_module.gql( + "SELECT * FROM SomeKind WHERE prop1 not in :1", ["a", "b", "c"] + ) + assert ( + query.__repr__() + == "Query(kind='SomeKind', filters=FilterNode('prop1', 'not_in', ['a', 'b', 'c']), order_by=[], offset=0)" + ) diff --git a/packages/google-cloud-ndb/tests/unit/test_stats.py b/packages/google-cloud-ndb/tests/unit/test_stats.py new file mode 100644 index 000000000000..265d45e629c0 --- /dev/null +++ b/packages/google-cloud-ndb/tests/unit/test_stats.py @@ -0,0 +1,413 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import datetime + +from google.cloud.ndb import stats + +from . import utils + + +DEFAULTS = { + "bytes": 4, + "count": 2, + "timestamp": datetime.datetime.utcfromtimestamp(40), +} + + +def test___all__(): + utils.verify___all__(stats) + + +class TestBaseStatistic: + @staticmethod + def test_get_kind(): + kind = stats.BaseStatistic.STORED_KIND_NAME + assert stats.BaseStatistic._get_kind() == kind + + @staticmethod + def test_constructor(): + stat = stats.BaseStatistic(**DEFAULTS) + assert stat.bytes == 4 + assert stat.count == 2 + + +class TestBaseKindStatistic: + @staticmethod + def test_get_kind(): + kind = stats.BaseKindStatistic.STORED_KIND_NAME + assert stats.BaseKindStatistic._get_kind() == kind + + @staticmethod + def test_constructor(): + stat = stats.BaseKindStatistic(kind_name="test_stat", **DEFAULTS) + assert stat.bytes == 4 + assert stat.count == 2 + assert stat.kind_name == "test_stat" + assert stat.entity_bytes == 0 + + +class TestGlobalStat: + @staticmethod + def test_get_kind(): + kind = stats.GlobalStat.STORED_KIND_NAME + assert stats.GlobalStat._get_kind() == kind + + @staticmethod + def test_constructor(): + stat = stats.GlobalStat(composite_index_count=5, **DEFAULTS) + assert stat.bytes == 4 + assert stat.count == 2 + assert stat.entity_bytes == 0 + assert stat.builtin_index_bytes == 0 + assert stat.builtin_index_count == 0 + assert stat.composite_index_bytes == 0 + assert stat.composite_index_count == 5 + + +class TestNamespaceStat: + @staticmethod + def test_get_kind(): + kind = stats.NamespaceStat.STORED_KIND_NAME + assert stats.NamespaceStat._get_kind() == kind + + @staticmethod + def test_constructor(): + stat = stats.NamespaceStat(subject_namespace="test", **DEFAULTS) + assert stat.bytes == 4 + assert stat.count == 2 + assert stat.subject_namespace == "test" + assert stat.entity_bytes == 0 + assert stat.builtin_index_bytes == 0 + assert stat.builtin_index_count == 0 + assert stat.composite_index_bytes == 0 + assert stat.composite_index_count == 0 + + +class TestKindStat: + @staticmethod + def test_get_kind(): + kind = stats.KindStat.STORED_KIND_NAME + assert stats.KindStat._get_kind() == kind + + @staticmethod + def test_constructor(): + stat = stats.KindStat( + kind_name="test_stat", composite_index_count=2, **DEFAULTS + ) + assert stat.bytes == 4 + assert stat.count == 2 + assert stat.kind_name == "test_stat" + assert stat.entity_bytes == 0 + assert stat.builtin_index_bytes == 0 + assert stat.builtin_index_count == 0 + assert stat.composite_index_bytes == 0 + assert stat.composite_index_count == 2 + + +class TestKindRootEntityStat: + @staticmethod + def test_get_kind(): + kind = stats.KindRootEntityStat.STORED_KIND_NAME + assert stats.KindRootEntityStat._get_kind() == kind + + @staticmethod + def test_constructor(): + stat = stats.KindRootEntityStat(kind_name="test_stat", **DEFAULTS) + assert stat.bytes == 4 + assert stat.count == 2 + assert stat.kind_name == "test_stat" + assert stat.entity_bytes == 0 + + +class TestKindNonRootEntityStat: + @staticmethod + def test_get_kind(): + kind = stats.KindNonRootEntityStat.STORED_KIND_NAME + assert 
stats.KindNonRootEntityStat._get_kind() == kind + + @staticmethod + def test_constructor(): + stat = stats.KindNonRootEntityStat(kind_name="test_stat", **DEFAULTS) + assert stat.bytes == 4 + assert stat.count == 2 + assert stat.kind_name == "test_stat" + assert stat.entity_bytes == 0 + + +class TestPropertyTypeStat: + @staticmethod + def test_get_kind(): + kind = stats.PropertyTypeStat.STORED_KIND_NAME + assert stats.PropertyTypeStat._get_kind() == kind + + @staticmethod + def test_constructor(): + stat = stats.PropertyTypeStat(property_type="test_property", **DEFAULTS) + assert stat.bytes == 4 + assert stat.count == 2 + assert stat.property_type == "test_property" + assert stat.entity_bytes == 0 + assert stat.builtin_index_bytes == 0 + assert stat.builtin_index_count == 0 + + +class TestKindPropertyTypeStat: + @staticmethod + def test_get_kind(): + kind = stats.KindPropertyTypeStat.STORED_KIND_NAME + assert stats.KindPropertyTypeStat._get_kind() == kind + + @staticmethod + def test_constructor(): + stat = stats.KindPropertyTypeStat( + kind_name="test_stat", property_type="test_property", **DEFAULTS + ) + assert stat.bytes == 4 + assert stat.count == 2 + assert stat.kind_name == "test_stat" + assert stat.entity_bytes == 0 + assert stat.property_type == "test_property" + assert stat.builtin_index_bytes == 0 + assert stat.builtin_index_count == 0 + + +class TestKindPropertyNameStat: + @staticmethod + def test_get_kind(): + kind = stats.KindPropertyNameStat.STORED_KIND_NAME + assert stats.KindPropertyNameStat._get_kind() == kind + + @staticmethod + def test_constructor(): + stat = stats.KindPropertyNameStat( + kind_name="test_stat", property_name="test_property", **DEFAULTS + ) + assert stat.bytes == 4 + assert stat.count == 2 + assert stat.kind_name == "test_stat" + assert stat.entity_bytes == 0 + assert stat.property_name == "test_property" + assert stat.builtin_index_bytes == 0 + assert stat.builtin_index_count == 0 + + +class TestKindPropertyNamePropertyTypeStat: + @staticmethod + def test_get_kind(): + kind = stats.KindPropertyNamePropertyTypeStat.STORED_KIND_NAME + assert stats.KindPropertyNamePropertyTypeStat._get_kind() == kind + + @staticmethod + def test_constructor(): + stat = stats.KindPropertyNamePropertyTypeStat( + kind_name="test_stat", + property_name="test_name", + property_type="test_type", + **DEFAULTS + ) + assert stat.bytes == 4 + assert stat.count == 2 + assert stat.kind_name == "test_stat" + assert stat.entity_bytes == 0 + assert stat.property_type == "test_type" + assert stat.property_name == "test_name" + assert stat.builtin_index_bytes == 0 + assert stat.builtin_index_count == 0 + + +class TestKindCompositeIndexStat: + @staticmethod + def test_get_kind(): + kind = stats.KindCompositeIndexStat.STORED_KIND_NAME + assert stats.KindCompositeIndexStat._get_kind() == kind + + @staticmethod + def test_constructor(): + stat = stats.KindCompositeIndexStat( + index_id=1, kind_name="test_kind", **DEFAULTS + ) + assert stat.bytes == 4 + assert stat.count == 2 + assert stat.index_id == 1 + assert stat.kind_name == "test_kind" + + +class TestNamespaceGlobalStat: + @staticmethod + def test_get_kind(): + kind = stats.NamespaceGlobalStat.STORED_KIND_NAME + assert stats.NamespaceGlobalStat._get_kind() == kind + + @staticmethod + def test_constructor(): + stat = stats.NamespaceGlobalStat(composite_index_count=5, **DEFAULTS) + assert stat.bytes == 4 + assert stat.count == 2 + assert stat.entity_bytes == 0 + assert stat.builtin_index_bytes == 0 + assert stat.builtin_index_count == 0 + 
assert stat.composite_index_bytes == 0 + assert stat.composite_index_count == 5 + + +class TestNamespaceKindCompositeIndexStat: + @staticmethod + def test_get_kind(): + kind = stats.NamespaceKindCompositeIndexStat.STORED_KIND_NAME + assert stats.NamespaceKindCompositeIndexStat._get_kind() == kind + + @staticmethod + def test_constructor(): + stat = stats.NamespaceKindCompositeIndexStat( + index_id=1, kind_name="test_kind", **DEFAULTS + ) + assert stat.bytes == 4 + assert stat.count == 2 + assert stat.index_id == 1 + assert stat.kind_name == "test_kind" + + +class TestNamespaceKindNonRootEntityStat: + @staticmethod + def test_get_kind(): + kind = stats.NamespaceKindNonRootEntityStat.STORED_KIND_NAME + assert stats.NamespaceKindNonRootEntityStat._get_kind() == kind + + @staticmethod + def test_constructor(): + stat = stats.NamespaceKindNonRootEntityStat(kind_name="test_stat", **DEFAULTS) + assert stat.bytes == 4 + assert stat.count == 2 + assert stat.kind_name == "test_stat" + assert stat.entity_bytes == 0 + + +class TestNamespaceKindPropertyNamePropertyTypeStat: + @staticmethod + def test_get_kind(): + kind = stats.NamespaceKindPropertyNamePropertyTypeStat.STORED_KIND_NAME + assert stats.NamespaceKindPropertyNamePropertyTypeStat._get_kind() == kind + + @staticmethod + def test_constructor(): + stat = stats.NamespaceKindPropertyNamePropertyTypeStat( + kind_name="test_stat", + property_name="test_name", + property_type="test_type", + **DEFAULTS + ) + assert stat.bytes == 4 + assert stat.count == 2 + assert stat.kind_name == "test_stat" + assert stat.entity_bytes == 0 + assert stat.property_type == "test_type" + assert stat.property_name == "test_name" + assert stat.builtin_index_bytes == 0 + assert stat.builtin_index_count == 0 + + +class TestNamespaceKindPropertyNameStat: + @staticmethod + def test_get_kind(): + kind = stats.NamespaceKindPropertyNameStat.STORED_KIND_NAME + assert stats.NamespaceKindPropertyNameStat._get_kind() == kind + + @staticmethod + def test_constructor(): + stat = stats.NamespaceKindPropertyNameStat( + kind_name="test_stat", property_name="test_property", **DEFAULTS + ) + assert stat.bytes == 4 + assert stat.count == 2 + assert stat.kind_name == "test_stat" + assert stat.entity_bytes == 0 + assert stat.property_name == "test_property" + assert stat.builtin_index_bytes == 0 + assert stat.builtin_index_count == 0 + + +class TestNamespaceKindPropertyTypeStat: + @staticmethod + def test_get_kind(): + kind = stats.NamespaceKindPropertyTypeStat.STORED_KIND_NAME + assert stats.NamespaceKindPropertyTypeStat._get_kind() == kind + + @staticmethod + def test_constructor(): + stat = stats.NamespaceKindPropertyTypeStat( + kind_name="test_stat", property_type="test_property", **DEFAULTS + ) + assert stat.bytes == 4 + assert stat.count == 2 + assert stat.kind_name == "test_stat" + assert stat.entity_bytes == 0 + assert stat.property_type == "test_property" + assert stat.builtin_index_bytes == 0 + assert stat.builtin_index_count == 0 + + +class TestNamespaceKindRootEntityStat: + @staticmethod + def test_get_kind(): + kind = stats.NamespaceKindRootEntityStat.STORED_KIND_NAME + assert stats.NamespaceKindRootEntityStat._get_kind() == kind + + @staticmethod + def test_constructor(): + stat = stats.NamespaceKindRootEntityStat(kind_name="test_stat", **DEFAULTS) + assert stat.bytes == 4 + assert stat.count == 2 + assert stat.kind_name == "test_stat" + assert stat.entity_bytes == 0 + + +class TestNamespacePropertyTypeStat: + @staticmethod + def test_get_kind(): + kind = 
stats.NamespacePropertyTypeStat.STORED_KIND_NAME + assert stats.NamespacePropertyTypeStat._get_kind() == kind + + @staticmethod + def test_constructor(): + stat = stats.NamespacePropertyTypeStat( + property_type="test_property", **DEFAULTS + ) + assert stat.bytes == 4 + assert stat.count == 2 + assert stat.property_type == "test_property" + assert stat.entity_bytes == 0 + assert stat.builtin_index_bytes == 0 + assert stat.builtin_index_count == 0 + + +class TestNamespaceKindStat: + @staticmethod + def test_get_kind(): + kind = stats.NamespaceKindStat.STORED_KIND_NAME + assert stats.NamespaceKindStat._get_kind() == kind + + @staticmethod + def test_constructor(): + stat = stats.NamespaceKindStat( + kind_name="test_stat", composite_index_count=2, **DEFAULTS + ) + assert stat.bytes == 4 + assert stat.count == 2 + assert stat.kind_name == "test_stat" + assert stat.entity_bytes == 0 + assert stat.builtin_index_bytes == 0 + assert stat.builtin_index_count == 0 + assert stat.composite_index_bytes == 0 + assert stat.composite_index_count == 2 diff --git a/packages/google-cloud-ndb/tests/unit/test_tasklets.py b/packages/google-cloud-ndb/tests/unit/test_tasklets.py new file mode 100644 index 000000000000..b88c1af2c561 --- /dev/null +++ b/packages/google-cloud-ndb/tests/unit/test_tasklets.py @@ -0,0 +1,753 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from unittest import mock + +import pytest + +from google.cloud.ndb import context as context_module +from google.cloud.ndb import _eventloop +from google.cloud.ndb import exceptions +from google.cloud.ndb import _remote +from google.cloud.ndb import tasklets + +from . 
import utils + + +def test___all__(): + utils.verify___all__(tasklets) + + +def test_add_flow_exception(): + with pytest.raises(NotImplementedError): + tasklets.add_flow_exception() + + +class TestFuture: + @staticmethod + def test_constructor(): + future = tasklets.Future() + assert future.running() + assert not future.done() + assert future.info == "Unknown" + + @staticmethod + def test_constructor_w_info(): + future = tasklets.Future("Testing") + assert future.running() + assert not future.done() + assert future.info == "Testing" + + @staticmethod + def test___repr__(): + future = tasklets.Future("The Children") + assert repr(future) == "Future('The Children') <{}>".format(id(future)) + + @staticmethod + def test_set_result(): + future = tasklets.Future() + future.set_result(42) + assert future.result() == 42 + assert future.get_result() == 42 + assert future.done() + assert not future.running() + assert future.exception() is None + assert future.get_exception() is None + assert future.get_traceback() is None + + @staticmethod + def test_set_result_already_done(): + future = tasklets.Future() + future.set_result(42) + with pytest.raises(RuntimeError): + future.set_result(42) + + @staticmethod + def test_add_done_callback(): + callback1 = mock.Mock() + callback2 = mock.Mock() + future = tasklets.Future() + future.add_done_callback(callback1) + future.add_done_callback(callback2) + future.set_result(42) + + callback1.assert_called_once_with(future) + callback2.assert_called_once_with(future) + + @staticmethod + def test_add_done_callback_already_done(): + callback = mock.Mock() + future = tasklets.Future() + future.set_result(42) + future.add_done_callback(callback) + callback.assert_called_once_with(future) + + @staticmethod + def test_set_exception(): + future = tasklets.Future() + error = Exception("Spurious Error") + future.set_exception(error) + assert future.exception() is error + assert future.get_exception() is error + assert future.get_traceback() is getattr(error, "__traceback__", None) + with pytest.raises(Exception): + future.result() + + @staticmethod + def test_set_exception_with_callback(): + callback = mock.Mock() + future = tasklets.Future() + future.add_done_callback(callback) + error = Exception("Spurious Error") + future.set_exception(error) + assert future.exception() is error + assert future.get_exception() is error + assert future.get_traceback() is getattr(error, "__traceback__", None) + callback.assert_called_once_with(future) + + @staticmethod + def test_set_exception_already_done(): + future = tasklets.Future() + error = Exception("Spurious Error") + future.set_exception(error) + with pytest.raises(RuntimeError): + future.set_exception(error) + + @staticmethod + @mock.patch("google.cloud.ndb.tasklets._eventloop") + def test_wait(_eventloop): + def side_effects(future): + yield True + yield True + future.set_result(42) + yield True + + future = tasklets.Future() + _eventloop.run1.side_effect = side_effects(future) + future.wait() + assert future.result() == 42 + assert _eventloop.run1.call_count == 3 + + @staticmethod + @mock.patch("google.cloud.ndb.tasklets._eventloop") + def test_wait_loop_exhausted(_eventloop): + future = tasklets.Future() + _eventloop.run1.return_value = False + with pytest.raises(RuntimeError): + future.wait() + + @staticmethod + @mock.patch("google.cloud.ndb.tasklets._eventloop") + def test_check_success(_eventloop): + def side_effects(future): + yield True + yield True + future.set_result(42) + yield True + + future = tasklets.Future() + 
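# each mocked _eventloop.run1 call advances side_effects: the third call sets the result, so check_success returns after three passes. +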
_eventloop.run1.side_effect = side_effects(future) + future.check_success() + assert future.result() == 42 + assert _eventloop.run1.call_count == 3 + + @staticmethod + @mock.patch("google.cloud.ndb.tasklets._eventloop") + def test_check_success_failure(_eventloop): + error = Exception("Spurious error") + + def side_effects(future): + yield True + yield True + future.set_exception(error) + yield True + + future = tasklets.Future() + _eventloop.run1.side_effect = side_effects(future) + with pytest.raises(Exception) as error_context: + future.check_success() + + assert error_context.value is error + + @staticmethod + @mock.patch("google.cloud.ndb.tasklets._eventloop") + def test_result_block_for_result(_eventloop): + def side_effects(future): + yield True + yield True + future.set_result(42) + yield True + + future = tasklets.Future() + _eventloop.run1.side_effect = side_effects(future) + assert future.result() == 42 + assert _eventloop.run1.call_count == 3 + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_cancel(): + # Integration test. Actually test that a cancel propagates properly. + rpc = tasklets.Future("Fake RPC") + wrapped_rpc = _remote.RemoteCall(rpc, "Wrapped Fake RPC") + + @tasklets.tasklet + def inner_tasklet(): + yield wrapped_rpc + + @tasklets.tasklet + def outer_tasklet(): + yield inner_tasklet() + + future = outer_tasklet() + assert not future.cancelled() + future.cancel() + assert rpc.cancelled() + + with pytest.raises(exceptions.Cancelled): + future.result() + + assert future.cancelled() + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_cancel_already_done(): + future = tasklets.Future("testing") + future.set_result(42) + future.cancel() # noop + assert not future.cancelled() + assert future.result() == 42 + + @staticmethod + def test_cancelled(): + future = tasklets.Future() + assert future.cancelled() is False + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_wait_any(): + futures = [tasklets.Future() for _ in range(3)] + + def callback(): + futures[1].set_result(42) + + _eventloop.add_idle(callback) + + future = tasklets.Future.wait_any(futures) + assert future is futures[1] + assert future.result() == 42 + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_wait_any_loop_exhausted(): + futures = [tasklets.Future() for _ in range(3)] + + with pytest.raises(RuntimeError): + tasklets.Future.wait_any(futures) + + @staticmethod + def test_wait_any_no_futures(): + assert tasklets.Future.wait_any(()) is None + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_wait_all(): + futures = [tasklets.Future() for _ in range(3)] + + def make_callback(index, result): + def callback(): + futures[index].set_result(result) + + return callback + + _eventloop.add_idle(make_callback(0, 42)) + _eventloop.add_idle(make_callback(1, 43)) + _eventloop.add_idle(make_callback(2, 44)) + + tasklets.Future.wait_all(futures) + assert futures[0].done() + assert futures[0].result() == 42 + assert futures[1].done() + assert futures[1].result() == 43 + assert futures[2].done() + assert futures[2].result() == 44 + + @staticmethod + def test_wait_all_no_futures(): + assert tasklets.Future.wait_all(()) is None + + +class Test_TaskletFuture: + @staticmethod + def test_constructor(): + generator = object() + context = object() + future = tasklets._TaskletFuture(generator, context) + assert future.generator is generator + assert future.context is context + assert future.info == "Unknown" + + @staticmethod + def 
test___repr__(): + future = tasklets._TaskletFuture(None, None, info="Female") + assert repr(future) == "_TaskletFuture('Female') <{}>".format(id(future)) + + @staticmethod + def test__advance_tasklet_return(in_context): + def generator_function(): + yield + raise tasklets.Return(42) + + generator = generator_function() + next(generator) # skip ahead to return + future = tasklets._TaskletFuture(generator, in_context) + future._advance_tasklet() + assert future.result() == 42 + + @staticmethod + def test__advance_tasklet_generator_raises(in_context): + error = Exception("Spurious error.") + + def generator_function(): + yield + raise error + + generator = generator_function() + next(generator) # skip ahead to return + future = tasklets._TaskletFuture(generator, in_context) + future._advance_tasklet() + assert future.exception() is error + + @staticmethod + def test__advance_tasklet_bad_yield(in_context): + def generator_function(): + yield 42 + + generator = generator_function() + future = tasklets._TaskletFuture(generator, in_context) + with pytest.raises(RuntimeError): + future._advance_tasklet() + + @staticmethod + def test__advance_tasklet_dependency_returns(in_context): + def generator_function(dependency): + some_value = yield dependency + raise tasklets.Return(some_value + 42) + + dependency = tasklets.Future() + generator = generator_function(dependency) + future = tasklets._TaskletFuture(generator, in_context) + future._advance_tasklet() + dependency.set_result(21) + assert future.result() == 63 + + @staticmethod + def test__advance_tasklet_dependency_raises(in_context): + def generator_function(dependency): + yield dependency + + error = Exception("Spurious error.") + dependency = tasklets.Future() + generator = generator_function(dependency) + future = tasklets._TaskletFuture(generator, in_context) + future._advance_tasklet() + dependency.set_exception(error) + assert future.exception() is error + with pytest.raises(Exception): + future.result() + + @staticmethod + def test__advance_tasklet_dependency_raises_with_try_except(in_context): + def generator_function(dependency, error_handler): + try: + yield dependency + except Exception: + result = yield error_handler + raise tasklets.Return(result) + + error = Exception("Spurious error.") + dependency = tasklets.Future() + error_handler = tasklets.Future() + generator = generator_function(dependency, error_handler) + future = tasklets._TaskletFuture(generator, in_context) + future._advance_tasklet() + dependency.set_exception(error) + assert future.running() + error_handler.set_result("hi mom!") + assert future.result() == "hi mom!" 
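+ + # A tasklet may yield more than plain Futures: the next two tests cover + # yielding a _remote.RemoteCall and yielding a collection of futures (a + # parallel yield), where the yield expression evaluates to the resolved + # result(s) of the dependency.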
+ + @staticmethod + def test__advance_tasklet_yields_rpc(in_context): + def generator_function(dependency): + value = yield dependency + raise tasklets.Return(value + 3) + + dependency = mock.Mock(spec=_remote.RemoteCall) + dependency.exception.return_value = None + dependency.result.return_value = 8 + generator = generator_function(dependency) + future = tasklets._TaskletFuture(generator, in_context) + future._advance_tasklet() + + callback = dependency.add_done_callback.call_args[0][0] + callback(dependency) + _eventloop.run() + assert future.result() == 11 + + @staticmethod + def test__advance_tasklet_parallel_yield(in_context): + def generator_function(dependencies): + one, two = yield dependencies + raise tasklets.Return(one + two) + + dependencies = (tasklets.Future(), tasklets.Future()) + generator = generator_function(dependencies) + future = tasklets._TaskletFuture(generator, in_context) + future._advance_tasklet() + dependencies[0].set_result(8) + dependencies[1].set_result(3) + assert future.result() == 11 + assert future.context is in_context + + @staticmethod + def test_cancel_not_waiting(in_context): + dependency = tasklets.Future() + future = tasklets._TaskletFuture(None, in_context) + future.cancel() + + assert not dependency.cancelled() + with pytest.raises(exceptions.Cancelled): + future.result() + + @staticmethod + def test_cancel_waiting_on_dependency(in_context): + def generator_function(dependency): + yield dependency + + dependency = tasklets.Future() + generator = generator_function(dependency) + future = tasklets._TaskletFuture(generator, in_context) + future._advance_tasklet() + future.cancel() + + assert dependency.cancelled() + with pytest.raises(exceptions.Cancelled): + future.result() + + +class Test_MultiFuture: + @staticmethod + def test___repr__(): + this, that = (tasklets.Future("this"), tasklets.Future("that")) + future = tasklets._MultiFuture((this, that)) + assert repr(future) == ( + "_MultiFuture(Future('this') <{}>," + " Future('that') <{}>) <{}>".format(id(this), id(that), id(future)) + ) + + @staticmethod + def test_success(): + dependencies = (tasklets.Future(), tasklets.Future()) + future = tasklets._MultiFuture(dependencies) + dependencies[0].set_result("one") + dependencies[1].set_result("two") + assert future.result() == ("one", "two") + + @staticmethod + def test_error(): + dependencies = (tasklets.Future(), tasklets.Future()) + future = tasklets._MultiFuture(dependencies) + error = Exception("Spurious error.") + dependencies[0].set_exception(error) + dependencies[1].set_result("two") + assert future.exception() is error + with pytest.raises(Exception): + future.result() + + @staticmethod + def test_cancel(): + dependencies = (tasklets.Future(), tasklets.Future()) + future = tasklets._MultiFuture(dependencies) + future.cancel() + assert dependencies[0].cancelled() + assert dependencies[1].cancelled() + with pytest.raises(exceptions.Cancelled): + future.result() + + @staticmethod + def test_no_dependencies(): + future = tasklets._MultiFuture(()) + assert future.result() == () + + @staticmethod + def test_nested(): + dependencies = [tasklets.Future() for _ in range(3)] + future = tasklets._MultiFuture((dependencies[0], dependencies[1:])) + for i, dependency in enumerate(dependencies): + dependency.set_result(i) + + assert future.result() == (0, (1, 2)) + + +class Test__get_return_value: + @staticmethod + def test_no_args(): + stop = StopIteration() + assert tasklets._get_return_value(stop) is None + + @staticmethod + def test_one_arg(): + stop = 
StopIteration(42) + assert tasklets._get_return_value(stop) == 42 + + @staticmethod + def test_two_args(): + stop = StopIteration(42, 21) + assert tasklets._get_return_value(stop) == (42, 21) + + +class Test_tasklet: + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_generator(): + @tasklets.tasklet + def generator(dependency): + value = yield dependency + raise tasklets.Return(value + 3) + + dependency = tasklets.Future() + future = generator(dependency) + assert isinstance(future, tasklets._TaskletFuture) + dependency.set_result(8) + assert future.result() == 11 + + # Can't make this work with 2.7, because the return with argument inside + # generator error crashes the pytest collection process, even with skip + # @staticmethod + # @pytest.mark.skipif(sys.version_info[0] == 2, reason="requires python3") + # @pytest.mark.usefixtures("in_context") + # def test_generator_using_return(): + # @tasklets.tasklet + # def generator(dependency): + # value = yield dependency + # return value + 3 + + # dependency = tasklets.Future() + # future = generator(dependency) + # assert isinstance(future, tasklets._TaskletFuture) + # dependency.set_result(8) + # assert future.result() == 11 + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_regular_function(): + @tasklets.tasklet + def regular_function(value): + return value + 3 + + future = regular_function(8) + assert isinstance(future, tasklets.Future) + assert future.result() == 11 + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_regular_function_raises_Return(): + @tasklets.tasklet + def regular_function(value): + raise tasklets.Return(value + 3) + + future = regular_function(8) + assert isinstance(future, tasklets.Future) + assert future.result() == 11 + + @staticmethod + def test_context_management(in_context): + @tasklets.tasklet + def some_task(transaction, future): + assert context_module.get_context().transaction == transaction + yield future + raise tasklets.Return(context_module.get_context().transaction) + + future_foo = tasklets.Future("foo") + with in_context.new(transaction="foo").use(): + task_foo = some_task("foo", future_foo) + + future_bar = tasklets.Future("bar") + with in_context.new(transaction="bar").use(): + task_bar = some_task("bar", future_bar) + + future_foo.set_result(None) + future_bar.set_result(None) + + assert task_foo.result() == "foo" + assert task_bar.result() == "bar" + + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_context_changed_in_tasklet(): + @tasklets.tasklet + def some_task(transaction, future1, future2): + context = context_module.get_context() + assert context.transaction is None + with context.new(transaction=transaction).use(): + assert context_module.get_context().transaction == transaction + yield future1 + assert context_module.get_context().transaction == transaction + yield future2 + assert context_module.get_context().transaction == transaction + assert context_module.get_context() is context + + future_foo1 = tasklets.Future("foo1") + future_foo2 = tasklets.Future("foo2") + task_foo = some_task("foo", future_foo1, future_foo2) + + future_bar1 = tasklets.Future("bar1") + future_bar2 = tasklets.Future("bar2") + task_bar = some_task("bar", future_bar1, future_bar2) + + future_foo1.set_result(None) + future_bar1.set_result(None) + future_foo2.set_result(None) + future_bar2.set_result(None) + + task_foo.check_success() + task_bar.check_success() + + +class Test_wait_any: + @staticmethod + @pytest.mark.usefixtures("in_context") + 
def test_it(): + futures = [tasklets.Future() for _ in range(3)] + + def callback(): + futures[1].set_result(42) + + _eventloop.add_idle(callback) + + future = tasklets.wait_any(futures) + assert future is futures[1] + assert future.result() == 42 + + @staticmethod + def test_it_no_futures(): + assert tasklets.wait_any(()) is None + + +class Test_wait_all: + @staticmethod + @pytest.mark.usefixtures("in_context") + def test_it(): + futures = [tasklets.Future() for _ in range(3)] + + def make_callback(index, result): + def callback(): + futures[index].set_result(result) + + return callback + + _eventloop.add_idle(make_callback(0, 42)) + _eventloop.add_idle(make_callback(1, 43)) + _eventloop.add_idle(make_callback(2, 44)) + + tasklets.wait_all(futures) + assert futures[0].done() + assert futures[0].result() == 42 + assert futures[1].done() + assert futures[1].result() == 43 + assert futures[2].done() + assert futures[2].result() == 44 + + @staticmethod + def test_it_no_futures(): + assert tasklets.wait_all(()) is None + + +@pytest.mark.usefixtures("in_context") +@mock.patch("google.cloud.ndb._eventloop.time") +def test_sleep(time_module, context): + time_module.time.side_effect = [0, 0, 1] + future = tasklets.sleep(1) + assert future.get_result() is None + time_module.sleep.assert_called_once_with(1) + + +def test_make_context(): + with pytest.raises(NotImplementedError): + tasklets.make_context() + + +def test_make_default_context(): + with pytest.raises(NotImplementedError): + tasklets.make_default_context() + + +class TestQueueFuture: + @staticmethod + def test_constructor(): + with pytest.raises(NotImplementedError): + tasklets.QueueFuture() + + +class TestReducingFuture: + @staticmethod + def test_constructor(): + with pytest.raises(NotImplementedError): + tasklets.ReducingFuture() + + +def test_Return(): + assert not issubclass(tasklets.Return, StopIteration) + assert issubclass(tasklets.Return, Exception) + + +class TestSerialQueueFuture: + @staticmethod + def test_constructor(): + with pytest.raises(NotImplementedError): + tasklets.SerialQueueFuture() + + +def test_set_context(): + with pytest.raises(NotImplementedError): + tasklets.set_context() + + +@pytest.mark.usefixtures("in_context") +def test_synctasklet(): + @tasklets.synctasklet + def generator_function(value): + future = tasklets.Future(value) + future.set_result(value) + x = yield future + raise tasklets.Return(x + 3) + + result = generator_function(8) + assert result == 11 + + +@pytest.mark.usefixtures("in_context") +def test_toplevel(): + @tasklets.toplevel + def generator_function(value): + future = tasklets.Future(value) + future.set_result(value) + x = yield future + raise tasklets.Return(x + 3) + + idle = mock.Mock(__name__="idle", return_value=None) + _eventloop.add_idle(idle) + + result = generator_function(8) + assert result == 11 + idle.assert_called_once_with() diff --git a/packages/google-cloud-ndb/tests/unit/test_utils.py b/packages/google-cloud-ndb/tests/unit/test_utils.py new file mode 100644 index 000000000000..d22ebc5718ec --- /dev/null +++ b/packages/google-cloud-ndb/tests/unit/test_utils.py @@ -0,0 +1,136 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import threading + +from unittest import mock + +import pytest + +from google.cloud.ndb import utils + + +class Test_asbool: + @staticmethod + def test_None(): + assert utils.asbool(None) is False + + @staticmethod + def test_bool(): + assert utils.asbool(True) is True + assert utils.asbool(False) is False + + @staticmethod + def test_truthy_int(): + assert utils.asbool(0) is False + assert utils.asbool(1) is True + + @staticmethod + def test_truthy_string(): + assert utils.asbool("Y") is True + assert utils.asbool("f") is False + + +def test_code_info(): + with pytest.raises(NotImplementedError): + utils.code_info() + + +def test_decorator(): + with pytest.raises(NotImplementedError): + utils.decorator() + + +def test_frame_info(): + with pytest.raises(NotImplementedError): + utils.frame_info() + + +def test_func_info(): + with pytest.raises(NotImplementedError): + utils.func_info() + + +def test_gen_info(): + with pytest.raises(NotImplementedError): + utils.gen_info() + + +def test_get_stack(): + with pytest.raises(NotImplementedError): + utils.get_stack() + + +class Test_logging_debug: + @staticmethod + @mock.patch("google.cloud.ndb.utils.DEBUG", False) + def test_noop(): + log = mock.Mock(spec=("debug",)) + utils.logging_debug(log, "hello dad! {} {where}", "I'm", where="in jail") + log.debug.assert_not_called() + + @staticmethod + @mock.patch("google.cloud.ndb.utils.DEBUG", True) + def test_log_it(): + log = mock.Mock(spec=("debug",)) + utils.logging_debug(log, "hello dad! {} {where}", "I'm", where="in jail") + log.debug.assert_called_once_with("hello dad! I'm in jail") + + +def test_positional(): + @utils.positional(2) + def test_func(a=1, b=2, **kwargs): + return a, b + + @utils.positional(1) + def test_func2(a=3, **kwargs): + return a + + with pytest.raises(TypeError): + test_func(1, 2, 3) + + with pytest.raises(TypeError): + test_func2(1, 2) + + assert test_func(4, 5, x=0) == (4, 5) + assert test_func(6) == (6, 2) + + assert test_func2(6) == 6 + + +def test_keyword_only(): + @utils.keyword_only(foo=1, bar=2, baz=3) + def test_kwonly(**kwargs): + return kwargs["foo"], kwargs["bar"], kwargs["baz"] + + with pytest.raises(TypeError): + test_kwonly(faz=4) + + assert test_kwonly() == (1, 2, 3) + assert test_kwonly(foo=3, bar=5, baz=7) == (3, 5, 7) + assert test_kwonly(baz=7) == (1, 2, 7) + + +def test_threading_local(): + assert utils.threading_local is threading.local + + +def test_tweak_logging(): + with pytest.raises(NotImplementedError): + utils.tweak_logging() + + +def test_wrapping(): + with pytest.raises(NotImplementedError): + utils.wrapping() diff --git a/packages/google-cloud-ndb/tests/unit/utils.py b/packages/google-cloud-ndb/tests/unit/utils.py new file mode 100644 index 000000000000..e20d4710ec99 --- /dev/null +++ b/packages/google-cloud-ndb/tests/unit/utils.py @@ -0,0 +1,47 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import types + +from google.cloud.ndb import tasklets + + +def verify___all__(module_obj): + expected = [] + for name in dir(module_obj): + if not name.startswith("_"): + value = getattr(module_obj, name) + if not isinstance(value, types.ModuleType): + expected.append(name) + expected.sort(key=str.lower) + assert sorted(module_obj.__all__, key=str.lower) == expected + + +def future_result(result): + """Return a future with the given result.""" + future = tasklets.Future() + future.set_result(result) + return future + + +def future_exception(exception): + """Return a future with the given exception.""" + future = tasklets.Future() + future.set_exception(exception) + return future + + +def future_results(*results): + """Return a sequence of futures for the given results.""" + return [future_result(result) for result in results]
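+ + +# Usage sketch (illustrative only, not exercised by the test suite): +# future_result(42).result() evaluates to 42, and +# future_exception(ValueError("boom")).exception() returns that same error.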