From 4344a2ce85b6acef5bbd00eb55afb81b175f8cc3 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Mon, 20 Sep 2021 16:54:19 -0500 Subject: [PATCH 001/210] chore: repository setup with templated files --- packages/db-dtypes/.coveragerc | 38 ++ packages/db-dtypes/.flake8 | 33 ++ packages/db-dtypes/.github/.OwlBot.lock.yaml | 3 + packages/db-dtypes/.github/.OwlBot.yaml | 18 + packages/db-dtypes/.github/CODEOWNERS | 11 + packages/db-dtypes/.github/CONTRIBUTING.md | 28 + .../.github/ISSUE_TEMPLATE/bug_report.md | 43 ++ .../.github/ISSUE_TEMPLATE/feature_request.md | 18 + .../.github/ISSUE_TEMPLATE/support_request.md | 7 + .../.github/PULL_REQUEST_TEMPLATE.md | 7 + .../db-dtypes/.github/header-checker-lint.yml | 15 + packages/db-dtypes/.github/release-please.yml | 1 + packages/db-dtypes/.github/snippet-bot.yml | 0 packages/db-dtypes/.gitignore | 63 +++ packages/db-dtypes/.kokoro/build.sh | 59 +++ .../db-dtypes/.kokoro/continuous/common.cfg | 27 + .../.kokoro/continuous/continuous.cfg | 1 + .../db-dtypes/.kokoro/docker/docs/Dockerfile | 67 +++ packages/db-dtypes/.kokoro/docs/common.cfg | 65 +++ .../db-dtypes/.kokoro/docs/docs-presubmit.cfg | 28 + packages/db-dtypes/.kokoro/docs/docs.cfg | 1 + .../db-dtypes/.kokoro/populate-secrets.sh | 43 ++ .../db-dtypes/.kokoro/presubmit/common.cfg | 27 + .../db-dtypes/.kokoro/presubmit/presubmit.cfg | 1 + packages/db-dtypes/.kokoro/publish-docs.sh | 64 +++ packages/db-dtypes/.kokoro/release.sh | 32 ++ packages/db-dtypes/.kokoro/release/common.cfg | 30 ++ .../db-dtypes/.kokoro/release/release.cfg | 1 + .../db-dtypes/.kokoro/samples/lint/common.cfg | 34 ++ .../.kokoro/samples/lint/continuous.cfg | 6 + .../.kokoro/samples/lint/periodic.cfg | 6 + .../.kokoro/samples/lint/presubmit.cfg | 6 + .../.kokoro/samples/python3.6/common.cfg | 40 ++ .../.kokoro/samples/python3.6/continuous.cfg | 6 + .../samples/python3.6/periodic-head.cfg | 11 + .../.kokoro/samples/python3.6/periodic.cfg | 6 + .../.kokoro/samples/python3.6/presubmit.cfg | 6 + 
.../.kokoro/samples/python3.7/common.cfg | 40 ++ .../.kokoro/samples/python3.7/continuous.cfg | 6 + .../samples/python3.7/periodic-head.cfg | 11 + .../.kokoro/samples/python3.7/periodic.cfg | 6 + .../.kokoro/samples/python3.7/presubmit.cfg | 6 + .../.kokoro/samples/python3.8/common.cfg | 40 ++ .../.kokoro/samples/python3.8/continuous.cfg | 6 + .../samples/python3.8/periodic-head.cfg | 11 + .../.kokoro/samples/python3.8/periodic.cfg | 6 + .../.kokoro/samples/python3.8/presubmit.cfg | 6 + .../.kokoro/samples/python3.9/common.cfg | 40 ++ .../.kokoro/samples/python3.9/continuous.cfg | 6 + .../samples/python3.9/periodic-head.cfg | 11 + .../.kokoro/samples/python3.9/periodic.cfg | 6 + .../.kokoro/samples/python3.9/presubmit.cfg | 6 + .../.kokoro/test-samples-against-head.sh | 28 + .../db-dtypes/.kokoro/test-samples-impl.sh | 102 ++++ packages/db-dtypes/.kokoro/test-samples.sh | 46 ++ packages/db-dtypes/.kokoro/trampoline.sh | 28 + packages/db-dtypes/.kokoro/trampoline_v2.sh | 487 ++++++++++++++++++ packages/db-dtypes/.pre-commit-config.yaml | 31 ++ packages/db-dtypes/.repo-metadata.json | 12 + packages/db-dtypes/.trampolinerc | 52 ++ packages/db-dtypes/AUTHORS | 9 + packages/db-dtypes/CHANGELOG.md | 5 + packages/db-dtypes/CODE_OF_CONDUCT.md | 95 ++++ packages/db-dtypes/CONTRIBUTING.rst | 279 ++++++++++ packages/db-dtypes/LICENSE | 202 ++++++++ packages/db-dtypes/MANIFEST.in | 25 + packages/db-dtypes/README.rst | 78 +++ packages/db-dtypes/SECURITY.md | 7 + packages/db-dtypes/db_dtypes/__init__.py | 22 + packages/db-dtypes/db_dtypes/version.py | 15 + packages/db-dtypes/dev_requirements.txt | 7 + packages/db-dtypes/docs/README.rst | 1 + packages/db-dtypes/docs/_static/custom.css | 20 + .../db-dtypes/docs/_templates/layout.html | 50 ++ packages/db-dtypes/docs/changelog.md | 1 + packages/db-dtypes/docs/conf.py | 367 +++++++++++++ packages/db-dtypes/docs/index.rst | 19 + packages/db-dtypes/docs/reference.rst | 4 + packages/db-dtypes/noxfile.py | 310 +++++++++++ 
packages/db-dtypes/owlbot.py | 97 ++++ packages/db-dtypes/release-procedure.md | 35 ++ packages/db-dtypes/renovate.json | 12 + packages/db-dtypes/scripts/decrypt-secrets.sh | 46 ++ .../scripts/readme-gen/readme_gen.py | 68 +++ .../readme-gen/templates/README.tmpl.rst | 87 ++++ .../readme-gen/templates/auth.tmpl.rst | 9 + .../templates/auth_api_key.tmpl.rst | 14 + .../templates/install_deps.tmpl.rst | 29 ++ .../templates/install_portaudio.tmpl.rst | 35 ++ packages/db-dtypes/setup.cfg | 27 + packages/db-dtypes/setup.py | 76 +++ packages/db-dtypes/testing/.gitignore | 3 + .../db-dtypes/testing/constraints-3.10.txt | 0 .../db-dtypes/testing/constraints-3.6.txt | 10 + .../db-dtypes/testing/constraints-3.7.txt | 0 .../db-dtypes/testing/constraints-3.8.txt | 1 + .../db-dtypes/testing/constraints-3.9.txt | 1 + packages/db-dtypes/tests/conftest.py | 28 + 98 files changed, 3939 insertions(+) create mode 100644 packages/db-dtypes/.coveragerc create mode 100644 packages/db-dtypes/.flake8 create mode 100644 packages/db-dtypes/.github/.OwlBot.lock.yaml create mode 100644 packages/db-dtypes/.github/.OwlBot.yaml create mode 100644 packages/db-dtypes/.github/CODEOWNERS create mode 100644 packages/db-dtypes/.github/CONTRIBUTING.md create mode 100644 packages/db-dtypes/.github/ISSUE_TEMPLATE/bug_report.md create mode 100644 packages/db-dtypes/.github/ISSUE_TEMPLATE/feature_request.md create mode 100644 packages/db-dtypes/.github/ISSUE_TEMPLATE/support_request.md create mode 100644 packages/db-dtypes/.github/PULL_REQUEST_TEMPLATE.md create mode 100644 packages/db-dtypes/.github/header-checker-lint.yml create mode 100644 packages/db-dtypes/.github/release-please.yml create mode 100644 packages/db-dtypes/.github/snippet-bot.yml create mode 100644 packages/db-dtypes/.gitignore create mode 100755 packages/db-dtypes/.kokoro/build.sh create mode 100644 packages/db-dtypes/.kokoro/continuous/common.cfg create mode 100644 packages/db-dtypes/.kokoro/continuous/continuous.cfg create mode 100644 
packages/db-dtypes/.kokoro/docker/docs/Dockerfile create mode 100644 packages/db-dtypes/.kokoro/docs/common.cfg create mode 100644 packages/db-dtypes/.kokoro/docs/docs-presubmit.cfg create mode 100644 packages/db-dtypes/.kokoro/docs/docs.cfg create mode 100755 packages/db-dtypes/.kokoro/populate-secrets.sh create mode 100644 packages/db-dtypes/.kokoro/presubmit/common.cfg create mode 100644 packages/db-dtypes/.kokoro/presubmit/presubmit.cfg create mode 100755 packages/db-dtypes/.kokoro/publish-docs.sh create mode 100755 packages/db-dtypes/.kokoro/release.sh create mode 100644 packages/db-dtypes/.kokoro/release/common.cfg create mode 100644 packages/db-dtypes/.kokoro/release/release.cfg create mode 100644 packages/db-dtypes/.kokoro/samples/lint/common.cfg create mode 100644 packages/db-dtypes/.kokoro/samples/lint/continuous.cfg create mode 100644 packages/db-dtypes/.kokoro/samples/lint/periodic.cfg create mode 100644 packages/db-dtypes/.kokoro/samples/lint/presubmit.cfg create mode 100644 packages/db-dtypes/.kokoro/samples/python3.6/common.cfg create mode 100644 packages/db-dtypes/.kokoro/samples/python3.6/continuous.cfg create mode 100644 packages/db-dtypes/.kokoro/samples/python3.6/periodic-head.cfg create mode 100644 packages/db-dtypes/.kokoro/samples/python3.6/periodic.cfg create mode 100644 packages/db-dtypes/.kokoro/samples/python3.6/presubmit.cfg create mode 100644 packages/db-dtypes/.kokoro/samples/python3.7/common.cfg create mode 100644 packages/db-dtypes/.kokoro/samples/python3.7/continuous.cfg create mode 100644 packages/db-dtypes/.kokoro/samples/python3.7/periodic-head.cfg create mode 100644 packages/db-dtypes/.kokoro/samples/python3.7/periodic.cfg create mode 100644 packages/db-dtypes/.kokoro/samples/python3.7/presubmit.cfg create mode 100644 packages/db-dtypes/.kokoro/samples/python3.8/common.cfg create mode 100644 packages/db-dtypes/.kokoro/samples/python3.8/continuous.cfg create mode 100644 
packages/db-dtypes/.kokoro/samples/python3.8/periodic-head.cfg create mode 100644 packages/db-dtypes/.kokoro/samples/python3.8/periodic.cfg create mode 100644 packages/db-dtypes/.kokoro/samples/python3.8/presubmit.cfg create mode 100644 packages/db-dtypes/.kokoro/samples/python3.9/common.cfg create mode 100644 packages/db-dtypes/.kokoro/samples/python3.9/continuous.cfg create mode 100644 packages/db-dtypes/.kokoro/samples/python3.9/periodic-head.cfg create mode 100644 packages/db-dtypes/.kokoro/samples/python3.9/periodic.cfg create mode 100644 packages/db-dtypes/.kokoro/samples/python3.9/presubmit.cfg create mode 100755 packages/db-dtypes/.kokoro/test-samples-against-head.sh create mode 100755 packages/db-dtypes/.kokoro/test-samples-impl.sh create mode 100755 packages/db-dtypes/.kokoro/test-samples.sh create mode 100755 packages/db-dtypes/.kokoro/trampoline.sh create mode 100755 packages/db-dtypes/.kokoro/trampoline_v2.sh create mode 100644 packages/db-dtypes/.pre-commit-config.yaml create mode 100644 packages/db-dtypes/.repo-metadata.json create mode 100644 packages/db-dtypes/.trampolinerc create mode 100644 packages/db-dtypes/AUTHORS create mode 100644 packages/db-dtypes/CHANGELOG.md create mode 100644 packages/db-dtypes/CODE_OF_CONDUCT.md create mode 100644 packages/db-dtypes/CONTRIBUTING.rst create mode 100644 packages/db-dtypes/LICENSE create mode 100644 packages/db-dtypes/MANIFEST.in create mode 100644 packages/db-dtypes/README.rst create mode 100644 packages/db-dtypes/SECURITY.md create mode 100644 packages/db-dtypes/db_dtypes/__init__.py create mode 100644 packages/db-dtypes/db_dtypes/version.py create mode 100644 packages/db-dtypes/dev_requirements.txt create mode 120000 packages/db-dtypes/docs/README.rst create mode 100644 packages/db-dtypes/docs/_static/custom.css create mode 100644 packages/db-dtypes/docs/_templates/layout.html create mode 120000 packages/db-dtypes/docs/changelog.md create mode 100644 packages/db-dtypes/docs/conf.py create mode 100644 
packages/db-dtypes/docs/index.rst create mode 100644 packages/db-dtypes/docs/reference.rst create mode 100644 packages/db-dtypes/noxfile.py create mode 100644 packages/db-dtypes/owlbot.py create mode 100644 packages/db-dtypes/release-procedure.md create mode 100644 packages/db-dtypes/renovate.json create mode 100755 packages/db-dtypes/scripts/decrypt-secrets.sh create mode 100644 packages/db-dtypes/scripts/readme-gen/readme_gen.py create mode 100644 packages/db-dtypes/scripts/readme-gen/templates/README.tmpl.rst create mode 100644 packages/db-dtypes/scripts/readme-gen/templates/auth.tmpl.rst create mode 100644 packages/db-dtypes/scripts/readme-gen/templates/auth_api_key.tmpl.rst create mode 100644 packages/db-dtypes/scripts/readme-gen/templates/install_deps.tmpl.rst create mode 100644 packages/db-dtypes/scripts/readme-gen/templates/install_portaudio.tmpl.rst create mode 100644 packages/db-dtypes/setup.cfg create mode 100644 packages/db-dtypes/setup.py create mode 100644 packages/db-dtypes/testing/.gitignore create mode 100644 packages/db-dtypes/testing/constraints-3.10.txt create mode 100644 packages/db-dtypes/testing/constraints-3.6.txt create mode 100644 packages/db-dtypes/testing/constraints-3.7.txt create mode 100644 packages/db-dtypes/testing/constraints-3.8.txt create mode 100644 packages/db-dtypes/testing/constraints-3.9.txt create mode 100644 packages/db-dtypes/tests/conftest.py diff --git a/packages/db-dtypes/.coveragerc b/packages/db-dtypes/.coveragerc new file mode 100644 index 000000000000..ad02e9bc3154 --- /dev/null +++ b/packages/db-dtypes/.coveragerc @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[run] +branch = True +omit = + db_dtypes/requirements.py + +[report] +fail_under = 100 +show_missing = True +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ + # Ignore abstract methods + raise NotImplementedError +omit = + */gapic/*.py + */proto/*.py + */core/*.py + */site-packages/*.py + db_dtypes/requirements.py diff --git a/packages/db-dtypes/.flake8 b/packages/db-dtypes/.flake8 new file mode 100644 index 000000000000..29227d4cf419 --- /dev/null +++ b/packages/db-dtypes/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. 
+ **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml new file mode 100644 index 000000000000..7b6cc31057ef --- /dev/null +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -0,0 +1,3 @@ +docker: + image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest + digest: sha256:a3a85c2e0b3293068e47b1635b178f7e3d3845f2cfb8722de6713d4bbafdcd1d diff --git a/packages/db-dtypes/.github/.OwlBot.yaml b/packages/db-dtypes/.github/.OwlBot.yaml new file mode 100644 index 000000000000..57184d996cbd --- /dev/null +++ b/packages/db-dtypes/.github/.OwlBot.yaml @@ -0,0 +1,18 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +docker: + image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest + +begin-after-commit-hash: be22498ce258bf2d5fe12fd696d3ad9a2b6c430e diff --git a/packages/db-dtypes/.github/CODEOWNERS b/packages/db-dtypes/.github/CODEOWNERS new file mode 100644 index 000000000000..1473ae0137ab --- /dev/null +++ b/packages/db-dtypes/.github/CODEOWNERS @@ -0,0 +1,11 @@ +# Code owners file. +# This file controls who is tagged for review for any given pull request. 
+# +# For syntax help see: +# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax + +# The @googleapis/api-bigquery is the default owner for changes in this repo +* @googleapis/api-bigquery @googleapis/yoshi-python + +# The python-samples-reviewers team is the default owner for samples changes +/samples/ @googleapis/python-samples-owners @googleapis/api-bigquery @googleapis/yoshi-python diff --git a/packages/db-dtypes/.github/CONTRIBUTING.md b/packages/db-dtypes/.github/CONTRIBUTING.md new file mode 100644 index 000000000000..939e5341e74d --- /dev/null +++ b/packages/db-dtypes/.github/CONTRIBUTING.md @@ -0,0 +1,28 @@ +# How to Contribute + +We'd love to accept your patches and contributions to this project. There are +just a few small guidelines you need to follow. + +## Contributor License Agreement + +Contributions to this project must be accompanied by a Contributor License +Agreement. You (or your employer) retain the copyright to your contribution; +this simply gives us permission to use and redistribute your contributions as +part of the project. Head over to to see +your current agreements on file or to sign a new one. + +You generally only need to submit a CLA once, so if you've already submitted one +(even if it was for a different project), you probably don't need to do it +again. + +## Code reviews + +All submissions, including submissions by project members, require review. We +use GitHub pull requests for this purpose. Consult +[GitHub Help](https://help.github.com/articles/about-pull-requests/) for more +information on using pull requests. + +## Community Guidelines + +This project follows [Google's Open Source Community +Guidelines](https://opensource.google.com/conduct/). 
diff --git a/packages/db-dtypes/.github/ISSUE_TEMPLATE/bug_report.md b/packages/db-dtypes/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 000000000000..864681c10f70 --- /dev/null +++ b/packages/db-dtypes/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,43 @@ +--- +name: Bug report +about: Create a report to help us improve + +--- + +Thanks for stopping by to let us know something could be better! + +**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. + +Please run down the following list and make sure you've tried the usual "quick fixes": + + - Search the issues already opened: https://github.com/googleapis/python-bigquery-sqlalchemy/issues + - Search StackOverflow: https://stackoverflow.com/questions/tagged/google-cloud-platform+python + +If you are still having issues, please be sure to include as much information as possible: + +#### Environment details + + - OS type and version: + - Python version: `python --version` + - pip version: `pip --version` + - `db-dtypes` version: `pip show db-dtypes` + +#### Steps to reproduce + + 1. ? + 2. ? + +#### Code example + +```python +# example +``` + +#### Stack trace +``` +# example +``` + +Making sure to follow these steps will guarantee the quickest resolution possible. + +Thanks! diff --git a/packages/db-dtypes/.github/ISSUE_TEMPLATE/feature_request.md b/packages/db-dtypes/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 000000000000..6365857f33c6 --- /dev/null +++ b/packages/db-dtypes/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,18 @@ +--- +name: Feature request +about: Suggest an idea for this library + +--- + +Thanks for stopping by to let us know something could be better! 
+ +**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. + + **Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] + **Describe the solution you'd like** +A clear and concise description of what you want to happen. + **Describe alternatives you've considered** +A clear and concise description of any alternative solutions or features you've considered. + **Additional context** +Add any other context or screenshots about the feature request here. diff --git a/packages/db-dtypes/.github/ISSUE_TEMPLATE/support_request.md b/packages/db-dtypes/.github/ISSUE_TEMPLATE/support_request.md new file mode 100644 index 000000000000..995869032125 --- /dev/null +++ b/packages/db-dtypes/.github/ISSUE_TEMPLATE/support_request.md @@ -0,0 +1,7 @@ +--- +name: Support request +about: If you have a support contract with Google, please create an issue in the Google Cloud Support console. + +--- + +**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. diff --git a/packages/db-dtypes/.github/PULL_REQUEST_TEMPLATE.md b/packages/db-dtypes/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 000000000000..029dc14b7ca7 --- /dev/null +++ b/packages/db-dtypes/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,7 @@ +Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly: +- [ ] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/python-bigquery-sqlalchemy/issues/new/choose) before writing your code! 
That way we can discuss the change, evaluate designs, and agree on the general idea +- [ ] Ensure the tests and linter pass +- [ ] Code coverage does not decrease (if any source code was changed) +- [ ] Appropriate docs were updated (if necessary) + +Fixes # 🦕 diff --git a/packages/db-dtypes/.github/header-checker-lint.yml b/packages/db-dtypes/.github/header-checker-lint.yml new file mode 100644 index 000000000000..3058bec338cb --- /dev/null +++ b/packages/db-dtypes/.github/header-checker-lint.yml @@ -0,0 +1,15 @@ +{"allowedCopyrightHolders": ["Google LLC"], + "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"], + "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt", "**/__init__.py", "samples/**/constraints.txt", "samples/**/constraints-test.txt"], + "sourceFileExtensions": [ + "ts", + "js", + "java", + "sh", + "Dockerfile", + "yaml", + "py", + "html", + "txt" + ] +} diff --git a/packages/db-dtypes/.github/release-please.yml b/packages/db-dtypes/.github/release-please.yml new file mode 100644 index 000000000000..4507ad0598a5 --- /dev/null +++ b/packages/db-dtypes/.github/release-please.yml @@ -0,0 +1 @@ +releaseType: python diff --git a/packages/db-dtypes/.github/snippet-bot.yml b/packages/db-dtypes/.github/snippet-bot.yml new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/db-dtypes/.gitignore b/packages/db-dtypes/.gitignore new file mode 100644 index 000000000000..b4243ced74e4 --- /dev/null +++ b/packages/db-dtypes/.gitignore @@ -0,0 +1,63 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +.eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated +docs.metadata + +# Virtual environment +env/ + +# Test logs 
+coverage.xml +*sponge_log.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. +pylintrc +pylintrc.test diff --git a/packages/db-dtypes/.kokoro/build.sh b/packages/db-dtypes/.kokoro/build.sh new file mode 100755 index 000000000000..2a2874e59f66 --- /dev/null +++ b/packages/db-dtypes/.kokoro/build.sh @@ -0,0 +1,59 @@ +#!/bin/bash +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +if [[ -z "${PROJECT_ROOT:-}" ]]; then + PROJECT_ROOT="github/python-bigquery-sqlalchemy" +fi + +cd "${PROJECT_ROOT}" + +# Disable buffering, so that the logs stream through. +export PYTHONUNBUFFERED=1 + +# Debug: show build environment +env | grep KOKORO + +# Setup service account credentials. +export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json + +# Setup project id. +export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") + +# Remove old nox +python3 -m pip uninstall --yes --quiet nox-automation + +# Install nox +python3 -m pip install --upgrade --quiet nox +python3 -m nox --version + +# If this is a continuous build, send the test log to the FlakyBot. +# See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. 
+if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then + cleanup() { + chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot + $KOKORO_GFILE_DIR/linux_amd64/flakybot + } + trap cleanup EXIT HUP +fi + +# If NOX_SESSION is set, it only runs the specified session, +# otherwise run all the sessions. +if [[ -n "${NOX_SESSION:-}" ]]; then + python3 -m nox -s ${NOX_SESSION:-} +else + python3 -m nox +fi diff --git a/packages/db-dtypes/.kokoro/continuous/common.cfg b/packages/db-dtypes/.kokoro/continuous/common.cfg new file mode 100644 index 000000000000..8c42ee6dc949 --- /dev/null +++ b/packages/db-dtypes/.kokoro/continuous/common.cfg @@ -0,0 +1,27 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Download resources for system tests (service account key, etc.) +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" + +# Use the trampoline script to run in docker. +build_file: "python-bigquery-sqlalchemy/.kokoro/trampoline.sh" + +# Configure the docker image for kokoro-trampoline. 
+env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" +} +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-bigquery-sqlalchemy/.kokoro/build.sh" +} diff --git a/packages/db-dtypes/.kokoro/continuous/continuous.cfg b/packages/db-dtypes/.kokoro/continuous/continuous.cfg new file mode 100644 index 000000000000..18a4c35325b8 --- /dev/null +++ b/packages/db-dtypes/.kokoro/continuous/continuous.cfg @@ -0,0 +1 @@ +# Format: //devtools/kokoro/config/proto/build.proto diff --git a/packages/db-dtypes/.kokoro/docker/docs/Dockerfile b/packages/db-dtypes/.kokoro/docker/docs/Dockerfile new file mode 100644 index 000000000000..4e1b1fb8b5a5 --- /dev/null +++ b/packages/db-dtypes/.kokoro/docker/docs/Dockerfile @@ -0,0 +1,67 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from ubuntu:20.04 + +ENV DEBIAN_FRONTEND noninteractive + +# Ensure local Python is preferred over distribution Python. +ENV PATH /usr/local/bin:$PATH + +# Install dependencies. 
+RUN apt-get update \ + && apt-get install -y --no-install-recommends \ + apt-transport-https \ + build-essential \ + ca-certificates \ + curl \ + dirmngr \ + git \ + gpg-agent \ + graphviz \ + libbz2-dev \ + libdb5.3-dev \ + libexpat1-dev \ + libffi-dev \ + liblzma-dev \ + libreadline-dev \ + libsnappy-dev \ + libssl-dev \ + libsqlite3-dev \ + portaudio19-dev \ + python3-distutils \ + redis-server \ + software-properties-common \ + ssh \ + sudo \ + tcl \ + tcl-dev \ + tk \ + tk-dev \ + uuid-dev \ + wget \ + zlib1g-dev \ + && add-apt-repository universe \ + && apt-get update \ + && apt-get -y install jq \ + && apt-get clean autoclean \ + && apt-get autoremove -y \ + && rm -rf /var/lib/apt/lists/* \ + && rm -f /var/cache/apt/archives/*.deb + +RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ + && python3.8 /tmp/get-pip.py \ + && rm /tmp/get-pip.py + +CMD ["python3.8"] diff --git a/packages/db-dtypes/.kokoro/docs/common.cfg b/packages/db-dtypes/.kokoro/docs/common.cfg new file mode 100644 index 000000000000..a10adabc0685 --- /dev/null +++ b/packages/db-dtypes/.kokoro/docs/common.cfg @@ -0,0 +1,65 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "python-bigquery-sqlalchemy/.kokoro/trampoline_v2.sh" + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-lib-docs" +} +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-bigquery-sqlalchemy/.kokoro/publish-docs.sh" +} + +env_vars: { + key: "STAGING_BUCKET" + value: "docs-staging" +} + +env_vars: { + key: "V2_STAGING_BUCKET" + value: "docs-staging-v2" +} + +# It will upload the docker image after successful builds. 
+env_vars: { + key: "TRAMPOLINE_IMAGE_UPLOAD" + value: "true" +} + +# It will always build the docker image. +env_vars: { + key: "TRAMPOLINE_DOCKERFILE" + value: ".kokoro/docker/docs/Dockerfile" +} + +# Fetch the token needed for reporting release status to GitHub +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "yoshi-automation-github-key" + } + } +} + +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "docuploader_service_account" + } + } +} diff --git a/packages/db-dtypes/.kokoro/docs/docs-presubmit.cfg b/packages/db-dtypes/.kokoro/docs/docs-presubmit.cfg new file mode 100644 index 000000000000..84d915515829 --- /dev/null +++ b/packages/db-dtypes/.kokoro/docs/docs-presubmit.cfg @@ -0,0 +1,28 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "STAGING_BUCKET" + value: "gcloud-python-test" +} + +env_vars: { + key: "V2_STAGING_BUCKET" + value: "gcloud-python-test" +} + +# We only upload the image in the main `docs` build. +env_vars: { + key: "TRAMPOLINE_IMAGE_UPLOAD" + value: "false" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-bigquery-sqlalchemy/.kokoro/build.sh" +} + +# Only run this nox session. +env_vars: { + key: "NOX_SESSION" + value: "docs docfx" +} diff --git a/packages/db-dtypes/.kokoro/docs/docs.cfg b/packages/db-dtypes/.kokoro/docs/docs.cfg new file mode 100644 index 000000000000..18a4c35325b8 --- /dev/null +++ b/packages/db-dtypes/.kokoro/docs/docs.cfg @@ -0,0 +1 @@ +# Format: //devtools/kokoro/config/proto/build.proto diff --git a/packages/db-dtypes/.kokoro/populate-secrets.sh b/packages/db-dtypes/.kokoro/populate-secrets.sh new file mode 100755 index 000000000000..f52514257ef0 --- /dev/null +++ b/packages/db-dtypes/.kokoro/populate-secrets.sh @@ -0,0 +1,43 @@ +#!/bin/bash +# Copyright 2020 Google LLC. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +function now { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n' ;} +function msg { println "$*" >&2 ;} +function println { printf '%s\n' "$(now) $*" ;} + + +# Populates requested secrets set in SECRET_MANAGER_KEYS from service account: +# kokoro-trampoline@cloud-devrel-kokoro-resources.iam.gserviceaccount.com +SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager" +msg "Creating folder on disk for secrets: ${SECRET_LOCATION}" +mkdir -p ${SECRET_LOCATION} +for key in $(echo ${SECRET_MANAGER_KEYS} | sed "s/,/ /g") +do + msg "Retrieving secret ${key}" + docker run --entrypoint=gcloud \ + --volume=${KOKORO_GFILE_DIR}:${KOKORO_GFILE_DIR} \ + gcr.io/google.com/cloudsdktool/cloud-sdk \ + secrets versions access latest \ + --project cloud-devrel-kokoro-resources \ + --secret ${key} > \ + "${SECRET_LOCATION}/${key}" + if [[ $? == 0 ]]; then + msg "Secret written to ${SECRET_LOCATION}/${key}" + else + msg "Error retrieving secret ${key}" + fi +done diff --git a/packages/db-dtypes/.kokoro/presubmit/common.cfg b/packages/db-dtypes/.kokoro/presubmit/common.cfg new file mode 100644 index 000000000000..8c42ee6dc949 --- /dev/null +++ b/packages/db-dtypes/.kokoro/presubmit/common.cfg @@ -0,0 +1,27 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Download trampoline resources. 
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Download resources for system tests (service account key, etc.) +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" + +# Use the trampoline script to run in docker. +build_file: "python-bigquery-sqlalchemy/.kokoro/trampoline.sh" + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" +} +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-bigquery-sqlalchemy/.kokoro/build.sh" +} diff --git a/packages/db-dtypes/.kokoro/presubmit/presubmit.cfg b/packages/db-dtypes/.kokoro/presubmit/presubmit.cfg new file mode 100644 index 000000000000..18a4c35325b8 --- /dev/null +++ b/packages/db-dtypes/.kokoro/presubmit/presubmit.cfg @@ -0,0 +1 @@ +# Format: //devtools/kokoro/config/proto/build.proto diff --git a/packages/db-dtypes/.kokoro/publish-docs.sh b/packages/db-dtypes/.kokoro/publish-docs.sh new file mode 100755 index 000000000000..8acb14e802b0 --- /dev/null +++ b/packages/db-dtypes/.kokoro/publish-docs.sh @@ -0,0 +1,64 @@ +#!/bin/bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +# Disable buffering, so that the logs stream through. 
+export PYTHONUNBUFFERED=1 + +export PATH="${HOME}/.local/bin:${PATH}" + +# Install nox +python3 -m pip install --user --upgrade --quiet nox +python3 -m nox --version + +# build docs +nox -s docs + +python3 -m pip install --user gcp-docuploader + +# create metadata +python3 -m docuploader create-metadata \ + --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ + --version=$(python3 setup.py --version) \ + --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ + --distribution-name=$(python3 setup.py --name) \ + --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ + --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ + --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) + +cat docs.metadata + +# upload docs +python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}" + + +# docfx yaml files +nox -s docfx + +# create metadata. 
+python3 -m docuploader create-metadata \ + --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ + --version=$(python3 setup.py --version) \ + --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ + --distribution-name=$(python3 setup.py --name) \ + --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ + --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ + --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) + +cat docs.metadata + +# upload docs +python3 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}" diff --git a/packages/db-dtypes/.kokoro/release.sh b/packages/db-dtypes/.kokoro/release.sh new file mode 100755 index 000000000000..75b7532fe70d --- /dev/null +++ b/packages/db-dtypes/.kokoro/release.sh @@ -0,0 +1,32 @@ +#!/bin/bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +# Start the releasetool reporter +python3 -m pip install gcp-releasetool +python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script + +# Ensure that we have the latest versions of Twine, Wheel, and Setuptools. +python3 -m pip install --upgrade twine wheel setuptools + +# Disable buffering, so that the logs stream through. 
+export PYTHONUNBUFFERED=1 + +# Move into the package, build the distribution and upload. +TWINE_PASSWORD=$(cat "${KOKORO_GFILE_DIR}/secret_manager/google-cloud-pypi-token") +cd github/python-bigquery-sqlalchemy +python3 setup.py sdist bdist_wheel +twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/packages/db-dtypes/.kokoro/release/common.cfg b/packages/db-dtypes/.kokoro/release/common.cfg new file mode 100644 index 000000000000..8f3be12645db --- /dev/null +++ b/packages/db-dtypes/.kokoro/release/common.cfg @@ -0,0 +1,30 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "python-bigquery-sqlalchemy/.kokoro/trampoline.sh" + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" +} +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-bigquery-sqlalchemy/.kokoro/release.sh" +} + +# Tokens needed to report release status back to GitHub +env_vars: { + key: "SECRET_MANAGER_KEYS" + value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem,google-cloud-pypi-token" +} diff --git a/packages/db-dtypes/.kokoro/release/release.cfg b/packages/db-dtypes/.kokoro/release/release.cfg new file mode 100644 index 000000000000..18a4c35325b8 --- /dev/null +++ b/packages/db-dtypes/.kokoro/release/release.cfg @@ -0,0 +1 @@ +# Format: //devtools/kokoro/config/proto/build.proto diff --git a/packages/db-dtypes/.kokoro/samples/lint/common.cfg b/packages/db-dtypes/.kokoro/samples/lint/common.cfg new file mode 100644 index 000000000000..35ebc5d2663e --- /dev/null +++ 
b/packages/db-dtypes/.kokoro/samples/lint/common.cfg @@ -0,0 +1,34 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "lint" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-bigquery-sqlalchemy/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "python-bigquery-sqlalchemy/.kokoro/trampoline.sh" diff --git a/packages/db-dtypes/.kokoro/samples/lint/continuous.cfg b/packages/db-dtypes/.kokoro/samples/lint/continuous.cfg new file mode 100644 index 000000000000..b196817872e9 --- /dev/null +++ b/packages/db-dtypes/.kokoro/samples/lint/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} diff --git a/packages/db-dtypes/.kokoro/samples/lint/periodic.cfg b/packages/db-dtypes/.kokoro/samples/lint/periodic.cfg new file mode 100644 index 000000000000..71cd1e597e38 --- /dev/null +++ b/packages/db-dtypes/.kokoro/samples/lint/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} diff --git a/packages/db-dtypes/.kokoro/samples/lint/presubmit.cfg b/packages/db-dtypes/.kokoro/samples/lint/presubmit.cfg new file mode 100644 index 000000000000..b196817872e9 --- /dev/null +++ b/packages/db-dtypes/.kokoro/samples/lint/presubmit.cfg @@ -0,0 +1,6 @@ +# 
Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} diff --git a/packages/db-dtypes/.kokoro/samples/python3.6/common.cfg b/packages/db-dtypes/.kokoro/samples/python3.6/common.cfg new file mode 100644 index 000000000000..2ff740aad0a8 --- /dev/null +++ b/packages/db-dtypes/.kokoro/samples/python3.6/common.cfg @@ -0,0 +1,40 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.6" +} + +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-py36" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-bigquery-sqlalchemy/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "python-bigquery-sqlalchemy/.kokoro/trampoline.sh" diff --git a/packages/db-dtypes/.kokoro/samples/python3.6/continuous.cfg b/packages/db-dtypes/.kokoro/samples/python3.6/continuous.cfg new file mode 100644 index 000000000000..b196817872e9 --- /dev/null +++ b/packages/db-dtypes/.kokoro/samples/python3.6/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} diff --git a/packages/db-dtypes/.kokoro/samples/python3.6/periodic-head.cfg b/packages/db-dtypes/.kokoro/samples/python3.6/periodic-head.cfg new file mode 100644 index 000000000000..abf3481d14dd --- /dev/null +++ b/packages/db-dtypes/.kokoro/samples/python3.6/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-bigquery-sqlalchemy/.kokoro/test-samples-against-head.sh" +} diff --git a/packages/db-dtypes/.kokoro/samples/python3.6/periodic.cfg b/packages/db-dtypes/.kokoro/samples/python3.6/periodic.cfg new file mode 100644 index 000000000000..71cd1e597e38 --- /dev/null +++ b/packages/db-dtypes/.kokoro/samples/python3.6/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} diff --git a/packages/db-dtypes/.kokoro/samples/python3.6/presubmit.cfg b/packages/db-dtypes/.kokoro/samples/python3.6/presubmit.cfg new file mode 100644 index 000000000000..b196817872e9 --- /dev/null +++ b/packages/db-dtypes/.kokoro/samples/python3.6/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} diff --git a/packages/db-dtypes/.kokoro/samples/python3.7/common.cfg b/packages/db-dtypes/.kokoro/samples/python3.7/common.cfg new file mode 100644 index 
000000000000..e5444b7774b4 --- /dev/null +++ b/packages/db-dtypes/.kokoro/samples/python3.7/common.cfg @@ -0,0 +1,40 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.7" +} + +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-py37" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-bigquery-sqlalchemy/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "python-bigquery-sqlalchemy/.kokoro/trampoline.sh" diff --git a/packages/db-dtypes/.kokoro/samples/python3.7/continuous.cfg b/packages/db-dtypes/.kokoro/samples/python3.7/continuous.cfg new file mode 100644 index 000000000000..b196817872e9 --- /dev/null +++ b/packages/db-dtypes/.kokoro/samples/python3.7/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} diff --git a/packages/db-dtypes/.kokoro/samples/python3.7/periodic-head.cfg b/packages/db-dtypes/.kokoro/samples/python3.7/periodic-head.cfg new file mode 100644 index 000000000000..abf3481d14dd --- /dev/null +++ b/packages/db-dtypes/.kokoro/samples/python3.7/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-bigquery-sqlalchemy/.kokoro/test-samples-against-head.sh" +} diff --git a/packages/db-dtypes/.kokoro/samples/python3.7/periodic.cfg b/packages/db-dtypes/.kokoro/samples/python3.7/periodic.cfg new file mode 100644 index 000000000000..71cd1e597e38 --- /dev/null +++ b/packages/db-dtypes/.kokoro/samples/python3.7/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} diff --git a/packages/db-dtypes/.kokoro/samples/python3.7/presubmit.cfg b/packages/db-dtypes/.kokoro/samples/python3.7/presubmit.cfg new file mode 100644 index 000000000000..b196817872e9 --- /dev/null +++ b/packages/db-dtypes/.kokoro/samples/python3.7/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} diff --git a/packages/db-dtypes/.kokoro/samples/python3.8/common.cfg b/packages/db-dtypes/.kokoro/samples/python3.8/common.cfg new file mode 100644 index 
000000000000..1cfbee76caa5 --- /dev/null +++ b/packages/db-dtypes/.kokoro/samples/python3.8/common.cfg @@ -0,0 +1,40 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.8" +} + +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-py38" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-bigquery-sqlalchemy/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "python-bigquery-sqlalchemy/.kokoro/trampoline.sh" diff --git a/packages/db-dtypes/.kokoro/samples/python3.8/continuous.cfg b/packages/db-dtypes/.kokoro/samples/python3.8/continuous.cfg new file mode 100644 index 000000000000..b196817872e9 --- /dev/null +++ b/packages/db-dtypes/.kokoro/samples/python3.8/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} diff --git a/packages/db-dtypes/.kokoro/samples/python3.8/periodic-head.cfg b/packages/db-dtypes/.kokoro/samples/python3.8/periodic-head.cfg new file mode 100644 index 000000000000..abf3481d14dd --- /dev/null +++ b/packages/db-dtypes/.kokoro/samples/python3.8/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-bigquery-sqlalchemy/.kokoro/test-samples-against-head.sh" +} diff --git a/packages/db-dtypes/.kokoro/samples/python3.8/periodic.cfg b/packages/db-dtypes/.kokoro/samples/python3.8/periodic.cfg new file mode 100644 index 000000000000..71cd1e597e38 --- /dev/null +++ b/packages/db-dtypes/.kokoro/samples/python3.8/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} diff --git a/packages/db-dtypes/.kokoro/samples/python3.8/presubmit.cfg b/packages/db-dtypes/.kokoro/samples/python3.8/presubmit.cfg new file mode 100644 index 000000000000..b196817872e9 --- /dev/null +++ b/packages/db-dtypes/.kokoro/samples/python3.8/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} diff --git a/packages/db-dtypes/.kokoro/samples/python3.9/common.cfg b/packages/db-dtypes/.kokoro/samples/python3.9/common.cfg new file mode 100644 index 
000000000000..cb8e45689a71 --- /dev/null +++ b/packages/db-dtypes/.kokoro/samples/python3.9/common.cfg @@ -0,0 +1,40 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.9" +} + +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-py39" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-bigquery-sqlalchemy/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "python-bigquery-sqlalchemy/.kokoro/trampoline.sh" diff --git a/packages/db-dtypes/.kokoro/samples/python3.9/continuous.cfg b/packages/db-dtypes/.kokoro/samples/python3.9/continuous.cfg new file mode 100644 index 000000000000..b196817872e9 --- /dev/null +++ b/packages/db-dtypes/.kokoro/samples/python3.9/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} diff --git a/packages/db-dtypes/.kokoro/samples/python3.9/periodic-head.cfg b/packages/db-dtypes/.kokoro/samples/python3.9/periodic-head.cfg new file mode 100644 index 000000000000..abf3481d14dd --- /dev/null +++ b/packages/db-dtypes/.kokoro/samples/python3.9/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-bigquery-sqlalchemy/.kokoro/test-samples-against-head.sh" +} diff --git a/packages/db-dtypes/.kokoro/samples/python3.9/periodic.cfg b/packages/db-dtypes/.kokoro/samples/python3.9/periodic.cfg new file mode 100644 index 000000000000..71cd1e597e38 --- /dev/null +++ b/packages/db-dtypes/.kokoro/samples/python3.9/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} diff --git a/packages/db-dtypes/.kokoro/samples/python3.9/presubmit.cfg b/packages/db-dtypes/.kokoro/samples/python3.9/presubmit.cfg new file mode 100644 index 000000000000..b196817872e9 --- /dev/null +++ b/packages/db-dtypes/.kokoro/samples/python3.9/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} diff --git a/packages/db-dtypes/.kokoro/test-samples-against-head.sh b/packages/db-dtypes/.kokoro/test-samples-against-head.sh new file mode 100755 index 
000000000000..a62db989762e --- /dev/null +++ b/packages/db-dtypes/.kokoro/test-samples-against-head.sh @@ -0,0 +1,28 @@ +#!/bin/bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# A customized test runner for samples. +# +# For periodic builds, you can specify this file for testing against head. + +# `-e` enables the script to automatically fail when a command fails +# `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero +set -eo pipefail +# Enables `**` to include files nested inside sub-folders +shopt -s globstar + +cd github/python-bigquery-sqlalchemy + +exec .kokoro/test-samples-impl.sh diff --git a/packages/db-dtypes/.kokoro/test-samples-impl.sh b/packages/db-dtypes/.kokoro/test-samples-impl.sh new file mode 100755 index 000000000000..8a324c9c7bc6 --- /dev/null +++ b/packages/db-dtypes/.kokoro/test-samples-impl.sh @@ -0,0 +1,102 @@ +#!/bin/bash +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + + +# `-e` enables the script to automatically fail when a command fails +# `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero +set -eo pipefail +# Enables `**` to include files nested inside sub-folders +shopt -s globstar + +# Exit early if samples don't exist +if ! find samples -name 'requirements.txt' | grep -q .; then + echo "No tests run. './samples/**/requirements.txt' not found" + exit 0 +fi + +# Disable buffering, so that the logs stream through. +export PYTHONUNBUFFERED=1 + +# Debug: show build environment +env | grep KOKORO + +# Install nox +python3.6 -m pip install --upgrade --quiet nox + +# Use secrets acessor service account to get secrets +if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then + gcloud auth activate-service-account \ + --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \ + --project="cloud-devrel-kokoro-resources" +fi + +# This script will create 3 files: +# - testing/test-env.sh +# - testing/service-account.json +# - testing/client-secrets.json +./scripts/decrypt-secrets.sh + +source ./testing/test-env.sh +export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json + +# For cloud-run session, we activate the service account for gcloud sdk. +gcloud auth activate-service-account \ + --key-file "${GOOGLE_APPLICATION_CREDENTIALS}" + +export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json + +echo -e "\n******************** TESTING PROJECTS ********************" + +# Switch to 'fail at end' to allow all tests to complete before exiting. +set +e +# Use RTN to return a non-zero value if the test fails. +RTN=0 +ROOT=$(pwd) +# Find all requirements.txt in the samples directory (may break on whitespace). +for file in samples/**/requirements.txt; do + cd "$ROOT" + # Navigate to the project folder. 
+ file=$(dirname "$file") + cd "$file" + + echo "------------------------------------------------------------" + echo "- testing $file" + echo "------------------------------------------------------------" + + # Use nox to execute the tests for the project. + python3.6 -m nox -s "$RUN_TESTS_SESSION" + EXIT=$? + + # If this is a periodic build, send the test log to the FlakyBot. + # See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. + if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then + chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot + $KOKORO_GFILE_DIR/linux_amd64/flakybot + fi + + if [[ $EXIT -ne 0 ]]; then + RTN=1 + echo -e "\n Testing failed: Nox returned a non-zero exit code. \n" + else + echo -e "\n Testing completed.\n" + fi + +done +cd "$ROOT" + +# Workaround for Kokoro permissions issue: delete secrets +rm testing/{test-env.sh,client-secrets.json,service-account.json} + +exit "$RTN" diff --git a/packages/db-dtypes/.kokoro/test-samples.sh b/packages/db-dtypes/.kokoro/test-samples.sh new file mode 100755 index 000000000000..f7713a95f09f --- /dev/null +++ b/packages/db-dtypes/.kokoro/test-samples.sh @@ -0,0 +1,46 @@ +#!/bin/bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# The default test runner for samples. +# +# For periodic builds, we rewinds the repo to the latest release, and +# run test-samples-impl.sh. 
+ +# `-e` enables the script to automatically fail when a command fails +# `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero +set -eo pipefail +# Enables `**` to include files nested inside sub-folders +shopt -s globstar + +cd github/python-bigquery-sqlalchemy + +# Run periodic samples tests at latest release +if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then + # preserving the test runner implementation. + cp .kokoro/test-samples-impl.sh "${TMPDIR}/test-samples-impl.sh" + echo "--- IMPORTANT IMPORTANT IMPORTANT ---" + echo "Now we rewind the repo back to the latest release..." + LATEST_RELEASE=$(git describe --abbrev=0 --tags) + git checkout $LATEST_RELEASE + echo "The current head is: " + echo $(git rev-parse --verify HEAD) + echo "--- IMPORTANT IMPORTANT IMPORTANT ---" + # move back the test runner implementation if there's no file. + if [ ! -f .kokoro/test-samples-impl.sh ]; then + cp "${TMPDIR}/test-samples-impl.sh" .kokoro/test-samples-impl.sh + fi +fi + +exec .kokoro/test-samples-impl.sh diff --git a/packages/db-dtypes/.kokoro/trampoline.sh b/packages/db-dtypes/.kokoro/trampoline.sh new file mode 100755 index 000000000000..a4241db23f41 --- /dev/null +++ b/packages/db-dtypes/.kokoro/trampoline.sh @@ -0,0 +1,28 @@ +#!/bin/bash +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +# Always run the cleanup script, regardless of the success of bouncing into +# the container. 
+function cleanup() { + chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh + ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh + echo "cleanup"; +} +trap cleanup EXIT + +$(dirname $0)/populate-secrets.sh # Secret Manager secrets. +python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" diff --git a/packages/db-dtypes/.kokoro/trampoline_v2.sh b/packages/db-dtypes/.kokoro/trampoline_v2.sh new file mode 100755 index 000000000000..4af6cdc26dbc --- /dev/null +++ b/packages/db-dtypes/.kokoro/trampoline_v2.sh @@ -0,0 +1,487 @@ +#!/usr/bin/env bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# trampoline_v2.sh +# +# This script does 3 things. +# +# 1. Prepare the Docker image for the test +# 2. Run the Docker with appropriate flags to run the test +# 3. Upload the newly built Docker image +# +# in a way that is somewhat compatible with trampoline_v1. +# +# To run this script, first download few files from gcs to /dev/shm. +# (/dev/shm is passed into the container as KOKORO_GFILE_DIR). +# +# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/secrets_viewer_service_account.json /dev/shm +# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/automl_secrets.txt /dev/shm +# +# Then run the script. +# .kokoro/trampoline_v2.sh +# +# These environment variables are required: +# TRAMPOLINE_IMAGE: The docker image to use. +# TRAMPOLINE_DOCKERFILE: The location of the Dockerfile. 
+# +# You can optionally change these environment variables: +# TRAMPOLINE_IMAGE_UPLOAD: +# (true|false): Whether to upload the Docker image after the +# successful builds. +# TRAMPOLINE_BUILD_FILE: The script to run in the docker container. +# TRAMPOLINE_WORKSPACE: The workspace path in the docker container. +# Defaults to /workspace. +# Potentially there are some repo specific envvars in .trampolinerc in +# the project root. + + +set -euo pipefail + +TRAMPOLINE_VERSION="2.0.5" + +if command -v tput >/dev/null && [[ -n "${TERM:-}" ]]; then + readonly IO_COLOR_RED="$(tput setaf 1)" + readonly IO_COLOR_GREEN="$(tput setaf 2)" + readonly IO_COLOR_YELLOW="$(tput setaf 3)" + readonly IO_COLOR_RESET="$(tput sgr0)" +else + readonly IO_COLOR_RED="" + readonly IO_COLOR_GREEN="" + readonly IO_COLOR_YELLOW="" + readonly IO_COLOR_RESET="" +fi + +function function_exists { + [ $(LC_ALL=C type -t $1)"" == "function" ] +} + +# Logs a message using the given color. The first argument must be one +# of the IO_COLOR_* variables defined above, such as +# "${IO_COLOR_YELLOW}". The remaining arguments will be logged in the +# given color. The log message will also have an RFC-3339 timestamp +# prepended (in UTC). You can disable the color output by setting +# TERM=vt100. +function log_impl() { + local color="$1" + shift + local timestamp="$(date -u "+%Y-%m-%dT%H:%M:%SZ")" + echo "================================================================" + echo "${color}${timestamp}:" "$@" "${IO_COLOR_RESET}" + echo "================================================================" +} + +# Logs the given message with normal coloring and a timestamp. +function log() { + log_impl "${IO_COLOR_RESET}" "$@" +} + +# Logs the given message in green with a timestamp. +function log_green() { + log_impl "${IO_COLOR_GREEN}" "$@" +} + +# Logs the given message in yellow with a timestamp. +function log_yellow() { + log_impl "${IO_COLOR_YELLOW}" "$@" +} + +# Logs the given message in red with a timestamp. 
+function log_red() { + log_impl "${IO_COLOR_RED}" "$@" +} + +readonly tmpdir=$(mktemp -d -t ci-XXXXXXXX) +readonly tmphome="${tmpdir}/h" +mkdir -p "${tmphome}" + +function cleanup() { + rm -rf "${tmpdir}" +} +trap cleanup EXIT + +RUNNING_IN_CI="${RUNNING_IN_CI:-false}" + +# The workspace in the container, defaults to /workspace. +TRAMPOLINE_WORKSPACE="${TRAMPOLINE_WORKSPACE:-/workspace}" + +pass_down_envvars=( + # TRAMPOLINE_V2 variables. + # Tells scripts whether they are running as part of CI or not. + "RUNNING_IN_CI" + # Indicates which CI system we're in. + "TRAMPOLINE_CI" + # Indicates the version of the script. + "TRAMPOLINE_VERSION" +) + +log_yellow "Building with Trampoline ${TRAMPOLINE_VERSION}" + +# Detect which CI systems we're in. If we're in any of the CI systems +# we support, `RUNNING_IN_CI` will be true and `TRAMPOLINE_CI` will be +# the name of the CI system. Both envvars will be passing down to the +# container for telling which CI system we're in. +if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then + # descriptive env var for indicating it's on CI. + RUNNING_IN_CI="true" + TRAMPOLINE_CI="kokoro" + if [[ "${TRAMPOLINE_USE_LEGACY_SERVICE_ACCOUNT:-}" == "true" ]]; then + if [[ ! -f "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" ]]; then + log_red "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json does not exist. Did you forget to mount cloud-devrel-kokoro-resources/trampoline? Aborting." + exit 1 + fi + # This service account will be activated later. + TRAMPOLINE_SERVICE_ACCOUNT="${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" + else + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + gcloud auth list + fi + log_yellow "Configuring Container Registry access" + gcloud auth configure-docker --quiet + fi + pass_down_envvars+=( + # KOKORO dynamic variables. 
+ "KOKORO_BUILD_NUMBER" + "KOKORO_BUILD_ID" + "KOKORO_JOB_NAME" + "KOKORO_GIT_COMMIT" + "KOKORO_GITHUB_COMMIT" + "KOKORO_GITHUB_PULL_REQUEST_NUMBER" + "KOKORO_GITHUB_PULL_REQUEST_COMMIT" + # For FlakyBot + "KOKORO_GITHUB_COMMIT_URL" + "KOKORO_GITHUB_PULL_REQUEST_URL" + ) +elif [[ "${TRAVIS:-}" == "true" ]]; then + RUNNING_IN_CI="true" + TRAMPOLINE_CI="travis" + pass_down_envvars+=( + "TRAVIS_BRANCH" + "TRAVIS_BUILD_ID" + "TRAVIS_BUILD_NUMBER" + "TRAVIS_BUILD_WEB_URL" + "TRAVIS_COMMIT" + "TRAVIS_COMMIT_MESSAGE" + "TRAVIS_COMMIT_RANGE" + "TRAVIS_JOB_NAME" + "TRAVIS_JOB_NUMBER" + "TRAVIS_JOB_WEB_URL" + "TRAVIS_PULL_REQUEST" + "TRAVIS_PULL_REQUEST_BRANCH" + "TRAVIS_PULL_REQUEST_SHA" + "TRAVIS_PULL_REQUEST_SLUG" + "TRAVIS_REPO_SLUG" + "TRAVIS_SECURE_ENV_VARS" + "TRAVIS_TAG" + ) +elif [[ -n "${GITHUB_RUN_ID:-}" ]]; then + RUNNING_IN_CI="true" + TRAMPOLINE_CI="github-workflow" + pass_down_envvars+=( + "GITHUB_WORKFLOW" + "GITHUB_RUN_ID" + "GITHUB_RUN_NUMBER" + "GITHUB_ACTION" + "GITHUB_ACTIONS" + "GITHUB_ACTOR" + "GITHUB_REPOSITORY" + "GITHUB_EVENT_NAME" + "GITHUB_EVENT_PATH" + "GITHUB_SHA" + "GITHUB_REF" + "GITHUB_HEAD_REF" + "GITHUB_BASE_REF" + ) +elif [[ "${CIRCLECI:-}" == "true" ]]; then + RUNNING_IN_CI="true" + TRAMPOLINE_CI="circleci" + pass_down_envvars+=( + "CIRCLE_BRANCH" + "CIRCLE_BUILD_NUM" + "CIRCLE_BUILD_URL" + "CIRCLE_COMPARE_URL" + "CIRCLE_JOB" + "CIRCLE_NODE_INDEX" + "CIRCLE_NODE_TOTAL" + "CIRCLE_PREVIOUS_BUILD_NUM" + "CIRCLE_PROJECT_REPONAME" + "CIRCLE_PROJECT_USERNAME" + "CIRCLE_REPOSITORY_URL" + "CIRCLE_SHA1" + "CIRCLE_STAGE" + "CIRCLE_USERNAME" + "CIRCLE_WORKFLOW_ID" + "CIRCLE_WORKFLOW_JOB_ID" + "CIRCLE_WORKFLOW_UPSTREAM_JOB_IDS" + "CIRCLE_WORKFLOW_WORKSPACE_ID" + ) +fi + +# Configure the service account for pulling the docker image. +function repo_root() { + local dir="$1" + while [[ ! -d "${dir}/.git" ]]; do + dir="$(dirname "$dir")" + done + echo "${dir}" +} + +# Detect the project root. 
In CI builds, we assume the script is in +# the git tree and traverse from there, otherwise, traverse from `pwd` +# to find `.git` directory. +if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then + PROGRAM_PATH="$(realpath "$0")" + PROGRAM_DIR="$(dirname "${PROGRAM_PATH}")" + PROJECT_ROOT="$(repo_root "${PROGRAM_DIR}")" +else + PROJECT_ROOT="$(repo_root $(pwd))" +fi + +log_yellow "Changing to the project root: ${PROJECT_ROOT}." +cd "${PROJECT_ROOT}" + +# To support relative path for `TRAMPOLINE_SERVICE_ACCOUNT`, we need +# to use this environment variable in `PROJECT_ROOT`. +if [[ -n "${TRAMPOLINE_SERVICE_ACCOUNT:-}" ]]; then + + mkdir -p "${tmpdir}/gcloud" + gcloud_config_dir="${tmpdir}/gcloud" + + log_yellow "Using isolated gcloud config: ${gcloud_config_dir}." + export CLOUDSDK_CONFIG="${gcloud_config_dir}" + + log_yellow "Using ${TRAMPOLINE_SERVICE_ACCOUNT} for authentication." + gcloud auth activate-service-account \ + --key-file "${TRAMPOLINE_SERVICE_ACCOUNT}" + log_yellow "Configuring Container Registry access" + gcloud auth configure-docker --quiet +fi + +required_envvars=( + # The basic trampoline configurations. + "TRAMPOLINE_IMAGE" + "TRAMPOLINE_BUILD_FILE" +) + +if [[ -f "${PROJECT_ROOT}/.trampolinerc" ]]; then + source "${PROJECT_ROOT}/.trampolinerc" +fi + +log_yellow "Checking environment variables." +for e in "${required_envvars[@]}" +do + if [[ -z "${!e:-}" ]]; then + log "Missing ${e} env var. Aborting." + exit 1 + fi +done + +# We want to support legacy style TRAMPOLINE_BUILD_FILE used with V1 +# script: e.g. "github/repo-name/.kokoro/run_tests.sh" +TRAMPOLINE_BUILD_FILE="${TRAMPOLINE_BUILD_FILE#github/*/}" +log_yellow "Using TRAMPOLINE_BUILD_FILE: ${TRAMPOLINE_BUILD_FILE}" + +# ignore error on docker operations and test execution +set +e + +log_yellow "Preparing Docker image." +# We only download the docker image in CI builds. 
+if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then + # Download the docker image specified by `TRAMPOLINE_IMAGE` + + # We may want to add --max-concurrent-downloads flag. + + log_yellow "Start pulling the Docker image: ${TRAMPOLINE_IMAGE}." + if docker pull "${TRAMPOLINE_IMAGE}"; then + log_green "Finished pulling the Docker image: ${TRAMPOLINE_IMAGE}." + has_image="true" + else + log_red "Failed pulling the Docker image: ${TRAMPOLINE_IMAGE}." + has_image="false" + fi +else + # For local run, check if we have the image. + if docker images "${TRAMPOLINE_IMAGE}:latest" | grep "${TRAMPOLINE_IMAGE}"; then + has_image="true" + else + has_image="false" + fi +fi + + +# The default user for a Docker container has uid 0 (root). To avoid +# creating root-owned files in the build directory we tell docker to +# use the current user ID. +user_uid="$(id -u)" +user_gid="$(id -g)" +user_name="$(id -un)" + +# To allow docker in docker, we add the user to the docker group in +# the host os. +docker_gid=$(cut -d: -f3 < <(getent group docker)) + +update_cache="false" +if [[ "${TRAMPOLINE_DOCKERFILE:-none}" != "none" ]]; then + # Build the Docker image from the source. + context_dir=$(dirname "${TRAMPOLINE_DOCKERFILE}") + docker_build_flags=( + "-f" "${TRAMPOLINE_DOCKERFILE}" + "-t" "${TRAMPOLINE_IMAGE}" + "--build-arg" "UID=${user_uid}" + "--build-arg" "USERNAME=${user_name}" + ) + if [[ "${has_image}" == "true" ]]; then + docker_build_flags+=("--cache-from" "${TRAMPOLINE_IMAGE}") + fi + + log_yellow "Start building the docker image." + if [[ "${TRAMPOLINE_VERBOSE:-false}" == "true" ]]; then + echo "docker build" "${docker_build_flags[@]}" "${context_dir}" + fi + + # ON CI systems, we want to suppress docker build logs, only + # output the logs when it fails. 
+ if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then + if docker build "${docker_build_flags[@]}" "${context_dir}" \ + > "${tmpdir}/docker_build.log" 2>&1; then + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + cat "${tmpdir}/docker_build.log" + fi + + log_green "Finished building the docker image." + update_cache="true" + else + log_red "Failed to build the Docker image, aborting." + log_yellow "Dumping the build logs:" + cat "${tmpdir}/docker_build.log" + exit 1 + fi + else + if docker build "${docker_build_flags[@]}" "${context_dir}"; then + log_green "Finished building the docker image." + update_cache="true" + else + log_red "Failed to build the Docker image, aborting." + exit 1 + fi + fi +else + if [[ "${has_image}" != "true" ]]; then + log_red "We do not have ${TRAMPOLINE_IMAGE} locally, aborting." + exit 1 + fi +fi + +# We use an array for the flags so they are easier to document. +docker_flags=( + # Remove the container after it exists. + "--rm" + + # Use the host network. + "--network=host" + + # Run in priviledged mode. We are not using docker for sandboxing or + # isolation, just for packaging our dev tools. + "--privileged" + + # Run the docker script with the user id. Because the docker image gets to + # write in ${PWD} you typically want this to be your user id. + # To allow docker in docker, we need to use docker gid on the host. + "--user" "${user_uid}:${docker_gid}" + + # Pass down the USER. + "--env" "USER=${user_name}" + + # Mount the project directory inside the Docker container. + "--volume" "${PROJECT_ROOT}:${TRAMPOLINE_WORKSPACE}" + "--workdir" "${TRAMPOLINE_WORKSPACE}" + "--env" "PROJECT_ROOT=${TRAMPOLINE_WORKSPACE}" + + # Mount the temporary home directory. + "--volume" "${tmphome}:/h" + "--env" "HOME=/h" + + # Allow docker in docker. + "--volume" "/var/run/docker.sock:/var/run/docker.sock" + + # Mount the /tmp so that docker in docker can mount the files + # there correctly. 
+ "--volume" "/tmp:/tmp" + # Pass down the KOKORO_GFILE_DIR and KOKORO_KEYSTORE_DIR + # TODO(tmatsuo): This part is not portable. + "--env" "TRAMPOLINE_SECRET_DIR=/secrets" + "--volume" "${KOKORO_GFILE_DIR:-/dev/shm}:/secrets/gfile" + "--env" "KOKORO_GFILE_DIR=/secrets/gfile" + "--volume" "${KOKORO_KEYSTORE_DIR:-/dev/shm}:/secrets/keystore" + "--env" "KOKORO_KEYSTORE_DIR=/secrets/keystore" +) + +# Add an option for nicer output if the build gets a tty. +if [[ -t 0 ]]; then + docker_flags+=("-it") +fi + +# Passing down env vars +for e in "${pass_down_envvars[@]}" +do + if [[ -n "${!e:-}" ]]; then + docker_flags+=("--env" "${e}=${!e}") + fi +done + +# If arguments are given, all arguments will become the commands run +# in the container, otherwise run TRAMPOLINE_BUILD_FILE. +if [[ $# -ge 1 ]]; then + log_yellow "Running the given commands '" "${@:1}" "' in the container." + readonly commands=("${@:1}") + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}" + fi + docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}" +else + log_yellow "Running the tests in a Docker container." + docker_flags+=("--entrypoint=${TRAMPOLINE_BUILD_FILE}") + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" + fi + docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" +fi + + +test_retval=$? + +if [[ ${test_retval} -eq 0 ]]; then + log_green "Build finished with ${test_retval}" +else + log_red "Build finished with ${test_retval}" +fi + +# Only upload it when the test passes. +if [[ "${update_cache}" == "true" ]] && \ + [[ $test_retval == 0 ]] && \ + [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]]; then + log_yellow "Uploading the Docker image." + if docker push "${TRAMPOLINE_IMAGE}"; then + log_green "Finished uploading the Docker image." + else + log_red "Failed uploading the Docker image." 
+ fi + # Call trampoline_after_upload_hook if it's defined. + if function_exists trampoline_after_upload_hook; then + trampoline_after_upload_hook + fi + +fi + +exit "${test_retval}" diff --git a/packages/db-dtypes/.pre-commit-config.yaml b/packages/db-dtypes/.pre-commit-config.yaml new file mode 100644 index 000000000000..62eb5a77d9a3 --- /dev/null +++ b/packages/db-dtypes/.pre-commit-config.yaml @@ -0,0 +1,31 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# See https://pre-commit.com for more information +# See https://pre-commit.com/hooks.html for more hooks +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.0.1 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-yaml +- repo: https://github.com/psf/black + rev: 19.10b0 + hooks: + - id: black +- repo: https://gitlab.com/pycqa/flake8 + rev: 3.9.2 + hooks: + - id: flake8 diff --git a/packages/db-dtypes/.repo-metadata.json b/packages/db-dtypes/.repo-metadata.json new file mode 100644 index 000000000000..7b37150c6c5e --- /dev/null +++ b/packages/db-dtypes/.repo-metadata.json @@ -0,0 +1,12 @@ +{ + "name": "db-dtypes", + "name_pretty": "Pandas Data Types for SQL systems (BigQuery, Spanner)", + "client_documentation": + "https://googleapis.dev/python/db-dtypes/latest/index.html", + "release_level": "beta", + "language": "python", + "library_type": "INTEGRATION", + "repo": "googleapis/python-db-dtypes-pandas", + "distribution_name": 
"db-dtypes", + "api_id": "bigquery.googleapis.com" +} diff --git a/packages/db-dtypes/.trampolinerc b/packages/db-dtypes/.trampolinerc new file mode 100644 index 000000000000..383b6ec89fbc --- /dev/null +++ b/packages/db-dtypes/.trampolinerc @@ -0,0 +1,52 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Template for .trampolinerc + +# Add required env vars here. +required_envvars+=( + "STAGING_BUCKET" + "V2_STAGING_BUCKET" +) + +# Add env vars which are passed down into the container here. +pass_down_envvars+=( + "STAGING_BUCKET" + "V2_STAGING_BUCKET" + "NOX_SESSION" +) + +# Prevent unintentional override on the default image. +if [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]] && \ + [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then + echo "Please set TRAMPOLINE_IMAGE if you want to upload the Docker image." + exit 1 +fi + +# Define the default value if it makes sense. +if [[ -z "${TRAMPOLINE_IMAGE_UPLOAD:-}" ]]; then + TRAMPOLINE_IMAGE_UPLOAD="" +fi + +if [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then + TRAMPOLINE_IMAGE="" +fi + +if [[ -z "${TRAMPOLINE_DOCKERFILE:-}" ]]; then + TRAMPOLINE_DOCKERFILE="" +fi + +if [[ -z "${TRAMPOLINE_BUILD_FILE:-}" ]]; then + TRAMPOLINE_BUILD_FILE="" +fi diff --git a/packages/db-dtypes/AUTHORS b/packages/db-dtypes/AUTHORS new file mode 100644 index 000000000000..ae8f0d2d3a1b --- /dev/null +++ b/packages/db-dtypes/AUTHORS @@ -0,0 +1,9 @@ +# This is the list of db-dtypes's significant contributors. 
+# +# This does not necessarily list everyone who has contributed code, +# especially since many employees of one corporation may be contributing. +# To see the full list of contributors, see the revision history in +# source control. +Google LLC +Jim Fulton +Tim Swast (tswast) diff --git a/packages/db-dtypes/CHANGELOG.md b/packages/db-dtypes/CHANGELOG.md new file mode 100644 index 000000000000..e59584268120 --- /dev/null +++ b/packages/db-dtypes/CHANGELOG.md @@ -0,0 +1,5 @@ +# Changelog + +[db-dtypes PyPI History][2] + +[2]: https://pypi.org/project/db-dtypes/#history diff --git a/packages/db-dtypes/CODE_OF_CONDUCT.md b/packages/db-dtypes/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..84ff396aef58 --- /dev/null +++ b/packages/db-dtypes/CODE_OF_CONDUCT.md @@ -0,0 +1,95 @@ + +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. 
+ +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. 
In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html diff --git a/packages/db-dtypes/CONTRIBUTING.rst b/packages/db-dtypes/CONTRIBUTING.rst new file mode 100644 index 000000000000..172f9dfccb74 --- /dev/null +++ b/packages/db-dtypes/CONTRIBUTING.rst @@ -0,0 +1,279 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: + 3.6, 3.7, 3.8 and 3.9 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``python-bigquery-sqlalchemy`` `repo`_ on GitHub. + +- Fork and clone the ``python-bigquery-sqlalchemy`` repository to your GitHub account by + clicking the "Fork" button. 
+ +- Clone your fork of ``python-bigquery-sqlalchemy`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-python-bigquery-sqlalchemy``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/python-bigquery-sqlalchemy.git hack-on-python-bigquery-sqlalchemy + $ cd hack-on-python-bigquery-sqlalchemy + # Configure remotes such that you can pull changes from the googleapis/python-bigquery-sqlalchemy + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/python-bigquery-sqlalchemy.git + # fetch and merge changes from upstream into main + $ git fetch upstream + $ git merge upstream/main + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. _repo: https://github.com/googleapis/python-bigquery-sqlalchemy + +Using ``nox`` +============= + +We use `nox `__ to instrument our tests. + +- To test your changes, run unit tests with ``nox``:: + $ nox -s unit + +- To run a single unit test:: + + $ nox -s unit-3.9 -- -k + + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. nox: https://pypi.org/project/nox/ + +***************************************** +I'm getting weird errors... Can you help? +***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. 
Run via:: + + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. + If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="main" + + By doing this, you are specifying the location of the most up-to-date + version of ``python-bigquery-sqlalchemy``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). + +- This repository contains configuration for the + `pre-commit `__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + # Run all system tests + $ nox -s system + + # Run a single system test + $ nox -s system-3.9 -- -k + + + .. note:: + + System tests are only configured to run under Python 3.8 and 3.9. + For expediency, we do not run them in older versions of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. 
Some tests require a service account. For those tests see `Authenticating as a service account `__. + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. + +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. + +Build the docs via: + + $ nox -s docs + +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` catalogue. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. + +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. 
``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/python-bigquery-sqlalchemy/blob/main/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/db-dtypes + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.6`_ +- `Python 3.7`_ +- `Python 3.8`_ +- `Python 3.9`_ + +.. _Python 3.6: https://docs.python.org/3.6/ +.. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. _config: https://github.com/googleapis/python-bigquery-sqlalchemy/blob/main/noxfile.py + + +We also explicitly decided to support Python 3 beginning with version 3.6. +Reasons for this include: + +- Encouraging use of newest versions of Python 3 +- Taking the lead of `prominent`_ open-source `projects`_ +- `Unicode literal support`_ which allows for a cleaner codebase that + works in both Python 2 and Python 3 + +.. _prominent: https://docs.djangoproject.com/en/1.9/faq/install/#what-python-version-can-i-use-with-django +.. _projects: http://flask.pocoo.org/docs/0.10/python3/ +.. _Unicode literal support: https://www.python.org/dev/peps/pep-0414/ + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. + +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA `__. 
+- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA `__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. diff --git a/packages/db-dtypes/LICENSE b/packages/db-dtypes/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/db-dtypes/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/db-dtypes/MANIFEST.in b/packages/db-dtypes/MANIFEST.in new file mode 100644 index 000000000000..e783f4c6209b --- /dev/null +++ b/packages/db-dtypes/MANIFEST.in @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
+include README.rst LICENSE
+recursive-include db_dtypes *.json *.proto py.typed
_Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +------------ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Python >= 3.6 + +Unsupported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python <= 3.5. + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + pip install virtualenv + virtualenv + source /bin/activate + /bin/pip install db-dtypes + + +Windows +^^^^^^^ + +.. code-block:: console + + pip install virtualenv + virtualenv + \Scripts\activate + \Scripts\pip.exe install db-dtypes diff --git a/packages/db-dtypes/SECURITY.md b/packages/db-dtypes/SECURITY.md new file mode 100644 index 000000000000..8b58ae9c01ae --- /dev/null +++ b/packages/db-dtypes/SECURITY.md @@ -0,0 +1,7 @@ +# Security Policy + +To report a security issue, please use [g.co/vulnz](https://g.co/vulnz). + +The Google Security Team will respond within 5 working days of your report on g.co/vulnz. + +We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue. diff --git a/packages/db-dtypes/db_dtypes/__init__.py b/packages/db-dtypes/db_dtypes/__init__.py new file mode 100644 index 000000000000..c95b9622edb2 --- /dev/null +++ b/packages/db-dtypes/db_dtypes/__init__.py @@ -0,0 +1,22 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" +Pandas Data Types for SQL systems (BigQuery, Spanner) +""" + +from .version import __version__ + +__all__ = [ + "__version__", +] diff --git a/packages/db-dtypes/db_dtypes/version.py b/packages/db-dtypes/db_dtypes/version.py new file mode 100644 index 000000000000..450268d22912 --- /dev/null +++ b/packages/db-dtypes/db_dtypes/version.py @@ -0,0 +1,15 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +__version__ = "0.0.1" diff --git a/packages/db-dtypes/dev_requirements.txt b/packages/db-dtypes/dev_requirements.txt new file mode 100644 index 000000000000..31b4ad771eed --- /dev/null +++ b/packages/db-dtypes/dev_requirements.txt @@ -0,0 +1,7 @@ +sqlalchemy>=1.1.9 +google-cloud-bigquery>=1.6.0 +future==0.18.2 + +pytest==6.2.5 +pytest-flake8==1.0.7 +pytz==2021.1 diff --git a/packages/db-dtypes/docs/README.rst b/packages/db-dtypes/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/db-dtypes/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/db-dtypes/docs/_static/custom.css b/packages/db-dtypes/docs/_static/custom.css new file mode 100644 index 000000000000..b0a295464b23 --- /dev/null +++ b/packages/db-dtypes/docs/_static/custom.css @@ -0,0 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/db-dtypes/docs/_templates/layout.html b/packages/db-dtypes/docs/_templates/layout.html new file mode 100644 index 000000000000..95e9c77fcfe1 --- /dev/null +++ b/packages/db-dtypes/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+ {{ sidebar() }} + {%- block document %} +
+ {%- if render_sidebar %} +
+ {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + + {%- endif %} + {% endblock %} + +
+
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please + visit Python 2 support on Google Cloud. +
+ {% block body %} {% endblock %} +
+ + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} +
+ {%- endif %} +
+ {%- endblock %} +
+
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/db-dtypes/docs/changelog.md b/packages/db-dtypes/docs/changelog.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/db-dtypes/docs/changelog.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/db-dtypes/docs/conf.py b/packages/db-dtypes/docs/conf.py new file mode 100644 index 000000000000..e9c68d4daf59 --- /dev/null +++ b/packages/db-dtypes/docs/conf.py @@ -0,0 +1,367 @@ +# -*- coding: utf-8 -*- +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# db-dtypes documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +# For plugins that can not read conf.py. 
+# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.5.5" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "recommonmark", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_options = {"members": True} +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = "db-dtypes" +copyright = "2019, Google" +author = "Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. 
+language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. 
+html_theme_options = { + "description": "Google Cloud Client Libraries for db-dtypes", + "github_user": "googleapis", + "github_repo": "python-bigquery-sqlalchemy", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. 
+# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "db-dtypes-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. 
+ # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + (root_doc, "db-dtypes.tex", "db-dtypes Documentation", author, "manual",) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [(root_doc, "db-dtypes", "db-dtypes Documentation", [author], 1,)] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. 
List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "db-dtypes", + "db-dtypes Documentation", + author, + "db-dtypes", + "db-dtypes Library", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/db-dtypes/docs/index.rst b/packages/db-dtypes/docs/index.rst new file mode 100644 index 000000000000..f5f0b6f845a9 --- /dev/null +++ b/packages/db-dtypes/docs/index.rst @@ -0,0 +1,19 @@ +.. include:: README.rst + +API Reference +------------- + +.. toctree:: + :maxdepth: 2 + + reference + +Changelog +--------- + +For a list of all ``db-dtypes`` releases: + +.. 
toctree:: + :maxdepth: 2 + + changelog diff --git a/packages/db-dtypes/docs/reference.rst b/packages/db-dtypes/docs/reference.rst new file mode 100644 index 000000000000..df1541d7d9e5 --- /dev/null +++ b/packages/db-dtypes/docs/reference.rst @@ -0,0 +1,4 @@ +API Reference +^^^^^^^^^^^^^ + +.. automodule:: db_dtypes.version diff --git a/packages/db-dtypes/noxfile.py b/packages/db-dtypes/noxfile.py new file mode 100644 index 000000000000..1e88b21b857f --- /dev/null +++ b/packages/db-dtypes/noxfile.py @@ -0,0 +1,310 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! + +from __future__ import absolute_import +import os +import pathlib +import shutil + +import nox + + +BLACK_VERSION = "black==19.10b0" +BLACK_PATHS = ["docs", "db_dtypes", "tests", "noxfile.py", "setup.py"] + +DEFAULT_PYTHON_VERSION = "3.8" + +# We're using two Python versions to test with sqlalchemy 1.3 and 1.4. 
+SYSTEM_TEST_PYTHON_VERSIONS = ["3.8", "3.9"] +UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "lint", + "unit", + "cover", + "system", + "compliance", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.stop_on_first_error = True +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", "--check", *BLACK_PATHS, + ) + session.run("flake8", "db_dtypes", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", *BLACK_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def default(session): + # Install all test dependencies, then install this package in-place. + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + session.install( + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", + "-c", + constraints_path, + ) + + if session.python == "3.8": + extras = "[tests,alembic]" + elif session.python == "3.9": + extras = "[tests,geography]" + else: + extras = "[tests]" + session.install("-e", f".{extras}", "-c", constraints_path) + + # Run py.test against the unit tests. 
+ session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=db_dtypes", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +def unit(session): + """Run the unit test suite.""" + default(session) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + # Use pre-release gRPC for system tests. + session.install("--pre", "grpcio") + + # Install all test dependencies, then install this package into the + # virtualenv's dist-packages. + session.install("mock", "pytest", "google-cloud-testutils", "-c", constraints_path) + if session.python == "3.8": + extras = "[tests,alembic]" + elif session.python == "3.9": + extras = "[tests,geography]" + else: + extras = "[tests]" + session.install("-e", f".{extras}", "-c", constraints_path) + + # Run py.test against the system tests. 
+ if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def compliance(session): + """Run the SQLAlchemy dialect-compliance system tests""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_folder_path = os.path.join("tests", "sqlalchemy_dialect_compliance") + + if os.environ.get("RUN_COMPLIANCE_TESTS", "true") == "false": + session.skip("RUN_COMPLIANCE_TESTS is set to false, skipping") + if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): + session.skip("Credentials must be set via environment variable") + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + if not os.path.exists(system_test_folder_path): + session.skip("Compliance tests were not found") + + session.install("--pre", "grpcio") + + session.install( + "mock", + "pytest", + "pytest-rerunfailures", + "google-cloud-testutils", + "-c", + constraints_path, + ) + if session.python == "3.8": + extras = "[tests,alembic]" + elif session.python == "3.9": + extras = "[tests,geography]" + else: + extras = "[tests]" + session.install("-e", f".{extras}", "-c", constraints_path) + + session.run( + "py.test", + "-vv", + f"--junitxml=compliance_{session.python}_sponge_log.xml", + "--reruns=3", + "--reruns-delay=60", + "--only-rerun=403 Exceeded rate limits", + "--only-rerun=409 Already Exists", + "--only-rerun=404 Not found", + "--only-rerun=400 Cannot execute DML over a non-existent table", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. 
+ + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", "alabaster", "geoalchemy2", "shapely", "recommonmark" + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + "sphinx==4.0.1", + "alabaster", + "geoalchemy2", + "shapely", + "recommonmark", + "gcp-sphinx-docfx-yaml", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) diff --git a/packages/db-dtypes/owlbot.py b/packages/db-dtypes/owlbot.py new file mode 100644 index 000000000000..5acefc945037 --- /dev/null +++ b/packages/db-dtypes/owlbot.py @@ -0,0 +1,97 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache 
License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This script is used to synthesize generated parts of this library.""" + +import pathlib + +import synthtool as s +from synthtool import gcp +from synthtool.languages import python + +REPO_ROOT = pathlib.Path(__file__).parent.absolute() + +common = gcp.CommonTemplates() + +# ---------------------------------------------------------------------------- +# Add templated files +# ---------------------------------------------------------------------------- +templated_files = common.py_library( + unit_test_python_versions=["3.6", "3.7", "3.8", "3.9"], + system_test_python_versions=["3.8"], + cov_level=100, +) +s.move(templated_files, excludes=["docs/multiprocessing.rst"]) + +# ---------------------------------------------------------------------------- +# Fixup files +# ---------------------------------------------------------------------------- + +s.replace( + [".coveragerc"], "google/cloud/__init__.py", "db_dtypes/requirements.py", +) + +s.replace( + ["noxfile.py"], r"[\"']google[\"']", '"db_dtypes"', +) + +s.replace( + ["noxfile.py"], "google/cloud", "db_dtypes", +) + + +def place_before(path, text, *before_text, escape=None): + replacement = "\n".join(before_text) + "\n" + text + if escape: + for c in escape: + text = text.replace(c, "\\" + c) + s.replace([path], text, replacement) + + +place_before( + "noxfile.py", + "nox.options.error_on_missing_interpreters = True", + "nox.options.stop_on_first_error = True", +) + +old_sessions = """ + "unit", + 
"system", + "cover", + "lint", +""" + +new_sessions = """ + "lint", + "unit", + "cover", + "system", + "compliance", +""" + +s.replace(["noxfile.py"], old_sessions, new_sessions) + +# ---------------------------------------------------------------------------- +# Samples templates +# ---------------------------------------------------------------------------- + +python.py_samples(skip_readmes=True) + +# ---------------------------------------------------------------------------- +# Final cleanup +# ---------------------------------------------------------------------------- + +s.shell.run(["nox", "-s", "blacken"], hide_output=False) +for noxfile in REPO_ROOT.glob("samples/**/noxfile.py"): + s.shell.run(["nox", "-s", "blacken"], cwd=noxfile.parent, hide_output=False) diff --git a/packages/db-dtypes/release-procedure.md b/packages/db-dtypes/release-procedure.md new file mode 100644 index 000000000000..699758c6faa5 --- /dev/null +++ b/packages/db-dtypes/release-procedure.md @@ -0,0 +1,35 @@ +# db-dtypes release procedure + +* Checkout main branch + + git fetch upstream main + git checkout main + git rebase -i upstream/main + +* Update version number in `setup.py` + +* Update `CHANGELOG.md` + +* Commit and push + + git commit -m "Release x.x.x" + git push upstream main + +* Build the package + + git clean -xfd + python setup.py register sdist bdist_wheel --universal + +* Upload to test PyPI + + twine upload --repository testpypi dist/* + +* Try out test PyPI package + + pip install --upgrade --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple db-dtypes + +* Upload to PyPI + + twine upload dist/* + +* Tag release on GitHub diff --git a/packages/db-dtypes/renovate.json b/packages/db-dtypes/renovate.json new file mode 100644 index 000000000000..c21036d385e5 --- /dev/null +++ b/packages/db-dtypes/renovate.json @@ -0,0 +1,12 @@ +{ + "extends": [ + "config:base", + "group:all", + ":preserveSemverRanges", + ":disableDependencyDashboard" + ], + 
"ignorePaths": [".pre-commit-config.yaml"], + "pip_requirements": { + "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] + } +} diff --git a/packages/db-dtypes/scripts/decrypt-secrets.sh b/packages/db-dtypes/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..21f6d2a26d90 --- /dev/null +++ b/packages/db-dtypes/scripts/decrypt-secrets.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. 
+PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/db-dtypes/scripts/readme-gen/readme_gen.py b/packages/db-dtypes/scripts/readme-gen/readme_gen.py new file mode 100644 index 000000000000..d0cc15825097 --- /dev/null +++ b/packages/db-dtypes/scripts/readme-gen/readme_gen.py @@ -0,0 +1,68 @@ +#!/usr/bin/env python + +# Copyright 2016 Google Inc +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Generates READMEs using configuration defined in yaml.""" + +import argparse +import io +import os +import subprocess + +import jinja2 +import yaml + + +jinja_env = jinja2.Environment( + trim_blocks=True, + loader=jinja2.FileSystemLoader( + os.path.abspath(os.path.join(os.path.dirname(__file__), "templates")) + ), +) + +README_TMPL = jinja_env.get_template("README.tmpl.rst") + + +def get_help(file): + return subprocess.check_output(["python", file, "--help"]).decode() + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("source") + parser.add_argument("--destination", default="README.rst") + + args = parser.parse_args() + + source = os.path.abspath(args.source) + root = os.path.dirname(source) + destination = os.path.join(root, args.destination) + + jinja_env.globals["get_help"] = get_help + + with io.open(source, "r") as f: + config = yaml.load(f) + + # This allows get_help to execute in the right directory. + os.chdir(root) + + output = README_TMPL.render(config) + + with io.open(destination, "w") as f: + f.write(output) + + +if __name__ == "__main__": + main() diff --git a/packages/db-dtypes/scripts/readme-gen/templates/README.tmpl.rst b/packages/db-dtypes/scripts/readme-gen/templates/README.tmpl.rst new file mode 100644 index 000000000000..30ad03d050d8 --- /dev/null +++ b/packages/db-dtypes/scripts/readme-gen/templates/README.tmpl.rst @@ -0,0 +1,87 @@ +{# The following line is a lie. BUT! Once jinja2 is done with it, it will + become truth! #} +.. This file is automatically generated. Do not edit this file directly. + +{{product.name}} Python Samples +=============================================================================== + +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/README.rst + + +This directory contains samples for {{product.name}}. 
{{product.description}} + +{{description}} + +.. _{{product.name}}: {{product.url}} + +{% if required_api_url %} +To run the sample, you need to enable the API at: {{required_api_url}} +{% endif %} + +{% if required_role %} +To run the sample, you need to have `{{required_role}}` role. +{% endif %} + +{{other_required_steps}} + +{% if setup %} +Setup +------------------------------------------------------------------------------- + +{% for section in setup %} + +{% include section + '.tmpl.rst' %} + +{% endfor %} +{% endif %} + +{% if samples %} +Samples +------------------------------------------------------------------------------- + +{% for sample in samples %} +{{sample.name}} ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +{% if not sample.hide_cloudshell_button %} +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst +{% endif %} + + +{{sample.description}} + +To run this sample: + +.. code-block:: bash + + $ python {{sample.file}} +{% if sample.show_help %} + + {{get_help(sample.file)|indent}} +{% endif %} + + +{% endfor %} +{% endif %} + +{% if cloud_client_library %} + +The client library +------------------------------------------------------------------------------- + +This sample uses the `Google Cloud Client Library for Python`_. +You can read the documentation for more details on API usage and use GitHub +to `browse the source`_ and `report issues`_. + +.. _Google Cloud Client Library for Python: + https://googlecloudplatform.github.io/google-cloud-python/ +.. _browse the source: + https://github.com/GoogleCloudPlatform/google-cloud-python +.. _report issues: + https://github.com/GoogleCloudPlatform/google-cloud-python/issues + +{% endif %} + +.. 
_Google Cloud SDK: https://cloud.google.com/sdk/ diff --git a/packages/db-dtypes/scripts/readme-gen/templates/auth.tmpl.rst b/packages/db-dtypes/scripts/readme-gen/templates/auth.tmpl.rst new file mode 100644 index 000000000000..1446b94a5e3a --- /dev/null +++ b/packages/db-dtypes/scripts/readme-gen/templates/auth.tmpl.rst @@ -0,0 +1,9 @@ +Authentication +++++++++++++++ + +This sample requires you to have authentication setup. Refer to the +`Authentication Getting Started Guide`_ for instructions on setting up +credentials for applications. + +.. _Authentication Getting Started Guide: + https://cloud.google.com/docs/authentication/getting-started diff --git a/packages/db-dtypes/scripts/readme-gen/templates/auth_api_key.tmpl.rst b/packages/db-dtypes/scripts/readme-gen/templates/auth_api_key.tmpl.rst new file mode 100644 index 000000000000..11957ce2714a --- /dev/null +++ b/packages/db-dtypes/scripts/readme-gen/templates/auth_api_key.tmpl.rst @@ -0,0 +1,14 @@ +Authentication +++++++++++++++ + +Authentication for this service is done via an `API Key`_. To obtain an API +Key: + +1. Open the `Cloud Platform Console`_ +2. Make sure that billing is enabled for your project. +3. From the **Credentials** page, create a new **API Key** or use an existing + one for your project. + +.. _API Key: + https://developers.google.com/api-client-library/python/guide/aaa_apikeys +.. _Cloud Platform Console: https://console.cloud.google.com/project?_ diff --git a/packages/db-dtypes/scripts/readme-gen/templates/install_deps.tmpl.rst b/packages/db-dtypes/scripts/readme-gen/templates/install_deps.tmpl.rst new file mode 100644 index 000000000000..275d649890d7 --- /dev/null +++ b/packages/db-dtypes/scripts/readme-gen/templates/install_deps.tmpl.rst @@ -0,0 +1,29 @@ +Install Dependencies +++++++++++++++++++++ + +#. Clone python-docs-samples and change directory to the sample directory you want to use. + + ..
code-block:: bash + + $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git + +#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. + + .. _Python Development Environment Setup Guide: + https://cloud.google.com/python/setup + +#. Create a virtualenv. Samples are compatible with Python 3.6+. + + .. code-block:: bash + + $ virtualenv env + $ source env/bin/activate + +#. Install the dependencies needed to run the samples. + + .. code-block:: bash + + $ pip install -r requirements.txt + +.. _pip: https://pip.pypa.io/ +.. _virtualenv: https://virtualenv.pypa.io/ diff --git a/packages/db-dtypes/scripts/readme-gen/templates/install_portaudio.tmpl.rst b/packages/db-dtypes/scripts/readme-gen/templates/install_portaudio.tmpl.rst new file mode 100644 index 000000000000..5ea33d18c00c --- /dev/null +++ b/packages/db-dtypes/scripts/readme-gen/templates/install_portaudio.tmpl.rst @@ -0,0 +1,35 @@ +Install PortAudio ++++++++++++++++++ + +Install `PortAudio`_. This is required by the `PyAudio`_ library to stream +audio from your computer's microphone. PyAudio depends on PortAudio for cross-platform compatibility, and is installed differently depending on the +platform. + +* For Mac OS X, you can use `Homebrew`_:: + + brew install portaudio + + **Note**: if you encounter an error when running `pip install` that indicates + it can't find `portaudio.h`, try running `pip install` with the following + flags:: + + pip install --global-option='build_ext' \ + --global-option='-I/usr/local/include' \ + --global-option='-L/usr/local/lib' \ + pyaudio + +* For Debian / Ubuntu Linux:: + + apt-get install portaudio19-dev python-all-dev + +* Windows may work without having to install PortAudio explicitly (it will get + installed with PyAudio). + +For more details, see the `PyAudio installation`_ page. + + +.. 
_PyAudio: https://people.csail.mit.edu/hubert/pyaudio/ +.. _PortAudio: http://www.portaudio.com/ +.. _PyAudio installation: + https://people.csail.mit.edu/hubert/pyaudio/#downloads +.. _Homebrew: http://brew.sh diff --git a/packages/db-dtypes/setup.cfg b/packages/db-dtypes/setup.cfg new file mode 100644 index 000000000000..bf8e3853ba9c --- /dev/null +++ b/packages/db-dtypes/setup.cfg @@ -0,0 +1,27 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
+[bdist_wheel] +universal = 1 + +[sqla_testing] +requirement_cls=db_dtypes.requirements:Requirements +profile_file=.sqlalchemy_dialect_compliance-profiles.txt + +[tool:pytest] +addopts= --tb native -v -r fxX -p no:warnings +python_files=tests/*test_*.py diff --git a/packages/db-dtypes/setup.py b/packages/db-dtypes/setup.py new file mode 100644 index 000000000000..a75960e22efb --- /dev/null +++ b/packages/db-dtypes/setup.py @@ -0,0 +1,76 @@ +#!/usr/bin/env python +# Copyright (c) 2017 The db-dtypes Authors +# +# Permission is hereby granted, free of charge, to any person obtaining a copy of +# this software and associated documentation files (the "Software"), to deal in +# the Software without restriction, including without limitation the rights to +# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +# the Software, and to permit persons to whom the Software is furnished to do so, +# subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +import io +import os +import re +from setuptools import setup + +# Package metadata. 
+ +name = "db-dtypes" +description = "SQLAlchemy dialect for BigQuery" + +# Should be one of: +# 'Development Status :: 3 - Alpha' +# 'Development Status :: 4 - Beta' +# 'Development Status :: 5 - Production/Stable' +release_status = "Development Status :: 4 - Beta" + +package_root = os.path.abspath(os.path.dirname(__file__)) +with open(os.path.join(package_root, "db_dtypes", "version.py")) as f: + version = re.search('__version__ = "([^"]+)"', f.read()).group(1) + + +def readme(): + with io.open("README.rst", "r", encoding="utf8") as f: + return f.read() + + +setup( + name=name, + version=version, + description=description, + long_description=readme(), + long_description_content_type="text/x-rst", + author="The db-dtypes Authors", + author_email="googleapis-packages@google.com", + packages=["db_dtypes"], + url="https://github.com/googleapis/python-db-dtypes-pandas", + keywords=["sql", "pandas"], + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Operating System :: OS Independent", + "Topic :: Database :: Front-Ends", + ], + platforms="Posix; MacOS X; Windows", + install_requires=[], + python_requires=">=3.6, <3.10", + tests_require=["packaging", "pytz"], + entry_points={"sqlalchemy.dialects": ["bigquery = db_dtypes:BigQueryDialect"]}, +) diff --git a/packages/db-dtypes/testing/.gitignore b/packages/db-dtypes/testing/.gitignore new file mode 100644 index 000000000000..40f47fa771e9 --- /dev/null +++ b/packages/db-dtypes/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json diff --git a/packages/db-dtypes/testing/constraints-3.10.txt b/packages/db-dtypes/testing/constraints-3.10.txt new file mode 100644 
index 000000000000..e69de29bb2d1 diff --git a/packages/db-dtypes/testing/constraints-3.6.txt b/packages/db-dtypes/testing/constraints-3.6.txt new file mode 100644 index 000000000000..60421130e299 --- /dev/null +++ b/packages/db-dtypes/testing/constraints-3.6.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List *all* library dependencies and extras in this file. +# Pin the version to the lower bound. +# +# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", +sqlalchemy==1.2.0 +google-auth==1.25.0 +google-cloud-bigquery==2.25.2 +google-api-core==1.30.0 diff --git a/packages/db-dtypes/testing/constraints-3.7.txt b/packages/db-dtypes/testing/constraints-3.7.txt new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/db-dtypes/testing/constraints-3.8.txt b/packages/db-dtypes/testing/constraints-3.8.txt new file mode 100644 index 000000000000..4884f96af3ef --- /dev/null +++ b/packages/db-dtypes/testing/constraints-3.8.txt @@ -0,0 +1 @@ +sqlalchemy==1.3.24 diff --git a/packages/db-dtypes/testing/constraints-3.9.txt b/packages/db-dtypes/testing/constraints-3.9.txt new file mode 100644 index 000000000000..eebb9da6fc72 --- /dev/null +++ b/packages/db-dtypes/testing/constraints-3.9.txt @@ -0,0 +1 @@ +sqlalchemy>=1.4.13 diff --git a/packages/db-dtypes/tests/conftest.py b/packages/db-dtypes/tests/conftest.py new file mode 100644 index 000000000000..f52c4d6ad337 --- /dev/null +++ b/packages/db-dtypes/tests/conftest.py @@ -0,0 +1,28 @@ +# Copyright (c) 2017 The db-dtypes Authors +# +# Permission is hereby granted, free of charge, to any person obtaining a copy of +# this software and associated documentation files (the "Software"), to deal in +# the Software without restriction, including without limitation the rights to +# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +# the Software, and to permit persons to whom the Software is furnished to do so, +# subject to 
the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +from sqlalchemy.dialects import registry + +registry.register("bigquery", "db_dtypes", "BigQueryDialect") + +# sqlalchemy's dialect-testing machinery wants an entry like this. +# This seems to be based around dialects maybe having multiple drivers +# and wanting to test driver-specific URLs, but doesn't seem to make +# much sense for dialects with only one driver.
¯\_(ツ)_/¯ +registry.register("bigquery.bigquery", "db_dtypes", "BigQueryDialect") From 7fcd8f378d1ae84d0b8e7e6c7c1963c3d6a1d600 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Mon, 20 Sep 2021 17:05:02 -0500 Subject: [PATCH 002/210] chore: correct setup.py --- packages/db-dtypes/setup.py | 28 +++++++++++----------------- 1 file changed, 11 insertions(+), 17 deletions(-) diff --git a/packages/db-dtypes/setup.py b/packages/db-dtypes/setup.py index a75960e22efb..5d41c5aa7f17 100644 --- a/packages/db-dtypes/setup.py +++ b/packages/db-dtypes/setup.py @@ -1,22 +1,16 @@ -#!/usr/bin/env python -# Copyright (c) 2017 The db-dtypes Authors +# Copyright 2021 Google LLC # -# Permission is hereby granted, free of charge, to any person obtaining a copy of -# this software and associated documentation files (the "Software"), to deal in -# the Software without restriction, including without limitation the rights to -# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -# the Software, and to permit persons to whom the Software is furnished to do so, -# subject to the following conditions: +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at # -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. +# https://www.apache.org/licenses/LICENSE-2.0 # -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR -# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. import io import os @@ -26,7 +20,7 @@ # Package metadata. name = "db-dtypes" -description = "SQLAlchemy dialect for BigQuery" +description = "Pandas Data Types for SQL systems (BigQuery, Spanner)" # Should be one of: # 'Development Status :: 3 - Alpha' From 90941818fd00c03adbffef3bc6c787bfd9915cd6 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Tue, 21 Sep 2021 14:04:57 -0500 Subject: [PATCH 003/210] feat: add `time` and `date` dtypes Migrated from BigQuery https://github.com/googleapis/python-bigquery/pull/972 --- packages/db-dtypes/db_dtypes/__init__.py | 172 ++++- packages/db-dtypes/db_dtypes/core.py | 210 ++++++ .../db-dtypes/db_dtypes/pandas_backports.py | 132 ++++ packages/db-dtypes/samples/__init__.py | 0 packages/db-dtypes/samples/pytest.ini | 0 .../snippets/__init__.py} | 12 +- .../db-dtypes/samples/snippets/noxfile.py | 266 ++++++++ .../samples/snippets/pandas_date_and_time.py | 84 +++ .../snippets/pandas_date_and_time_test.py | 59 ++ .../samples/snippets/requirements-test.txt | 1 + .../samples/snippets/requirements.txt | 3 + packages/db-dtypes/setup.py | 10 +- packages/db-dtypes/tests/unit/test_dtypes.py | 616 ++++++++++++++++++ 13 files changed, 1552 insertions(+), 13 deletions(-) create mode 100644 packages/db-dtypes/db_dtypes/core.py create mode 100644 packages/db-dtypes/db_dtypes/pandas_backports.py create mode 100644 packages/db-dtypes/samples/__init__.py create mode 100644 packages/db-dtypes/samples/pytest.ini rename packages/db-dtypes/{tests/conftest.py => samples/snippets/__init__.py} (68%) create mode 100644 packages/db-dtypes/samples/snippets/noxfile.py create mode 100644 packages/db-dtypes/samples/snippets/pandas_date_and_time.py 
create mode 100644 packages/db-dtypes/samples/snippets/pandas_date_and_time_test.py create mode 100644 packages/db-dtypes/samples/snippets/requirements-test.txt create mode 100644 packages/db-dtypes/samples/snippets/requirements.txt create mode 100644 packages/db-dtypes/tests/unit/test_dtypes.py diff --git a/packages/db-dtypes/db_dtypes/__init__.py b/packages/db-dtypes/db_dtypes/__init__.py index c95b9622edb2..8a58666e7f52 100644 --- a/packages/db-dtypes/db_dtypes/__init__.py +++ b/packages/db-dtypes/db_dtypes/__init__.py @@ -15,8 +15,178 @@ Pandas Data Types for SQL systems (BigQuery, Spanner) """ -from .version import __version__ +import datetime + +import numpy +import pandas +import pandas.compat.numpy.function +import pandas.core.algorithms +import pandas.core.arrays +import pandas.core.dtypes.base +import pandas.core.dtypes.dtypes +import pandas.core.dtypes.generic +import pandas.core.nanops +import pyarrow + +from db_dtypes.version import __version__ +from db_dtypes import core + + +date_dtype_name = "date" +time_dtype_name = "time" + + +@pandas.core.dtypes.dtypes.register_extension_dtype +class TimeDtype(core.BaseDatetimeDtype): + """ + Extension dtype for time data. 
+ """ + + name = time_dtype_name + type = datetime.time + + def construct_array_type(self): + return TimeArray + + +class TimeArray(core.BaseDatetimeArray): + """ + Pandas array type containing time data + """ + + # Data are stored as datetime64 values with a date of Jan 1, 1970 + + dtype = TimeDtype() + _epoch = datetime.datetime(1970, 1, 1) + _npepoch = numpy.datetime64(_epoch) + + @classmethod + def _datetime(cls, scalar): + if isinstance(scalar, datetime.time): + return datetime.datetime.combine(cls._epoch, scalar) + elif isinstance(scalar, str): + # iso string + h, m, s = map(float, scalar.split(":")) + s, us = divmod(s, 1) + return datetime.datetime( + 1970, 1, 1, int(h), int(m), int(s), int(us * 1000000) + ) + else: + raise TypeError("Invalid value type", scalar) + + def _box_func(self, x): + if pandas.isnull(x): + return None + + try: + return x.astype(" Scalar: + pandas.compat.numpy.function.validate_min((), kwargs) + result = pandas.core.nanops.nanmin( + values=self._ndarray, axis=axis, mask=self.isna(), skipna=skipna + ) + return self._box_func(result) + + def max( + self, *, axis: Optional[int] = None, skipna: bool = True, **kwargs + ) -> Scalar: + pandas.compat.numpy.function.validate_max((), kwargs) + result = pandas.core.nanops.nanmax( + values=self._ndarray, axis=axis, mask=self.isna(), skipna=skipna + ) + return self._box_func(result) + + if pandas_release >= (1, 2): + + def median( + self, + *, + axis: Optional[int] = None, + out=None, + overwrite_input: bool = False, + keepdims: bool = False, + skipna: bool = True, + ): + pandas.compat.numpy.function.validate_median( + (), + {"out": out, "overwrite_input": overwrite_input, "keepdims": keepdims}, + ) + result = pandas.core.nanops.nanmedian( + self._ndarray, axis=axis, skipna=skipna + ) + return self._box_func(result) diff --git a/packages/db-dtypes/db_dtypes/pandas_backports.py b/packages/db-dtypes/db_dtypes/pandas_backports.py new file mode 100644 index 000000000000..bfeb148b9dea --- /dev/null 
+++ b/packages/db-dtypes/db_dtypes/pandas_backports.py @@ -0,0 +1,132 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Utilities to support older pandas versions. + +These backported versions are simpler and, in some cases, less featureful than +the versions in the later versions of pandas. +""" + +import operator + +import numpy +import packaging.version +import pandas +from pandas._libs.lib import is_integer + + +pandas_release = packaging.version.parse(pandas.__version__).release + + +def import_default(module_name, force=False, default=None): + if default is None: + return lambda func: import_default(module_name, force, func) + + if force: + return default + + name = default.__name__ + try: + module = __import__(module_name, {}, {}, [name]) + except ModuleNotFoundError: + return default + + return getattr(module, name, default) + + +@import_default("pandas.core.arraylike") +class OpsMixin: + def _cmp_method(self, other, op): # pragma: NO COVER + return NotImplemented + + def __eq__(self, other): + return self._cmp_method(other, operator.eq) + + def __ne__(self, other): + return self._cmp_method(other, operator.ne) + + def __lt__(self, other): + return self._cmp_method(other, operator.lt) + + def __le__(self, other): + return self._cmp_method(other, operator.le) + + def __gt__(self, other): + return self._cmp_method(other, operator.gt) + + def __ge__(self, other): + return self._cmp_method(other, operator.ge) + + __add__ = 
__radd__ = __sub__ = lambda self, other: NotImplemented + + +@import_default("pandas.core.arrays._mixins", pandas_release < (1, 3)) +class NDArrayBackedExtensionArray(pandas.core.arrays.base.ExtensionArray): + + ndim = 1 + + def __init__(self, values, dtype): + assert isinstance(values, numpy.ndarray) + assert values.ndim == 1 + self._ndarray = values + self._dtype = dtype + + @classmethod + def _from_backing_data(cls, data): + return cls(data, data.dtype) + + def __getitem__(self, index): + value = self._ndarray[index] + if is_integer(index): + return self._box_func(value) + return self.__class__(value, self._dtype) + + def __setitem__(self, index, value): + self._ndarray[index] = value + + def __len__(self): + return len(self._ndarray) + + @property + def shape(self): + return self._ndarray.shape + + @property + def ndim(self) -> int: + return self._ndarray.ndim + + @property + def size(self) -> int: + return self._ndarray.size + + @property + def nbytes(self) -> int: + return self._ndarray.nbytes + + def copy(self): + return self[:] + + def repeat(self, n): + return self.__class__(self._ndarray.repeat(n), self._dtype) + + @classmethod + def _concat_same_type(cls, to_concat, axis=0): + dtypes = {str(x.dtype) for x in to_concat} + if len(dtypes) != 1: + raise ValueError("to_concat must have the same dtype (tz)", dtypes) + + new_values = [x._ndarray for x in to_concat] + new_values = numpy.concatenate(new_values, axis=axis) + return to_concat[0]._from_backing_data(new_values) # type: ignore[arg-type] diff --git a/packages/db-dtypes/samples/__init__.py b/packages/db-dtypes/samples/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/db-dtypes/samples/pytest.ini b/packages/db-dtypes/samples/pytest.ini new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/db-dtypes/tests/conftest.py b/packages/db-dtypes/samples/snippets/__init__.py similarity index 68% rename from packages/db-dtypes/tests/conftest.py rename to 
packages/db-dtypes/samples/snippets/__init__.py index f52c4d6ad337..fa3a9cd632b9 100644 --- a/packages/db-dtypes/tests/conftest.py +++ b/packages/db-dtypes/samples/snippets/__init__.py @@ -1,4 +1,4 @@ -# Copyright (c) 2017 The db-dtypes Authors +# Copyright (c) 2021 The sqlalchemy-bigquery Authors # # Permission is hereby granted, free of charge, to any person obtaining a copy of # this software and associated documentation files (the "Software"), to deal in @@ -17,12 +17,4 @@ # IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN # CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -from sqlalchemy.dialects import registry - -registry.register("bigquery", "db_dtypes", "BigQueryDialect") - -# sqlalchemy's dialect-testing machinery wants an entry like this. -# This seems to be based around dialects maybe having multiple drivers -# and wanting to test drover-specific URLs, but doesn't seem to make -# much sense for dialects with only one driver. ¯\_(ツ)_/¯ -registry.register("bigquery.bigquery", "db_dtypes", "BigQueryDialect") +__version__ = "1.0.0-a1" diff --git a/packages/db-dtypes/samples/snippets/noxfile.py b/packages/db-dtypes/samples/snippets/noxfile.py new file mode 100644 index 000000000000..b008613f03ff --- /dev/null +++ b/packages/db-dtypes/samples/snippets/noxfile.py @@ -0,0 +1,266 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import print_function + +import os +from pathlib import Path +import sys +from typing import Callable, Dict, List, Optional + +import nox + + +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING +# DO NOT EDIT THIS FILE EVER! +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING + +BLACK_VERSION = "black==19.10b0" + +# Copy `noxfile_config.py` to your directory and modify it instead. + +# `TEST_CONFIG` dict is a configuration hook that allows users to +# modify the test configurations. The values here should be in sync +# with `noxfile_config.py`. Users will copy `noxfile_config.py` into +# their directory and modify it. + +TEST_CONFIG = { + # You can opt out from the test for specific Python versions. + "ignored_versions": [], + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + "enforce_type_hints": False, + # An envvar key for determining the project id to use. Change it + # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a + # build specific Cloud project. You can also use your own string + # to use your own Cloud project. + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", + # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, + # A dictionary you want to inject into your test. Don't put any + # secrets here. These values will override predefined values. + "envs": {}, +} + + +try: + # Ensure we can import noxfile_config in the project's directory. + sys.path.append(".") + from noxfile_config import TEST_CONFIG_OVERRIDE +except ImportError as e: + print("No user noxfile_config found: detail: {}".format(e)) + TEST_CONFIG_OVERRIDE = {} + +# Update the TEST_CONFIG with the user supplied values. 
+TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) + + +def get_pytest_env_vars() -> Dict[str, str]: + """Returns a dict for pytest invocation.""" + ret = {} + + # Override the GCLOUD_PROJECT and the alias. + env_key = TEST_CONFIG["gcloud_project_env"] + # This should error out if not set. + ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] + + # Apply user supplied envs. + ret.update(TEST_CONFIG["envs"]) + return ret + + +# DO NOT EDIT - automatically generated. +# All versions used to test samples. +ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] + +# Any default versions that should be ignored. +IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] + +TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) + +INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ( + "True", + "true", +) +# +# Style Checks +# + + +def _determine_local_import_names(start_dir: str) -> List[str]: + """Determines all import names that should be considered "local". + + This is used when running the linter to insure that import order is + properly checked. + """ + file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)] + return [ + basename + for basename, extension in file_ext_pairs + if extension == ".py" + or os.path.isdir(os.path.join(start_dir, basename)) + and basename not in ("__pycache__") + ] + + +# Linting with flake8. 
+# +# We ignore the following rules: +# E203: whitespace before ‘:’ +# E266: too many leading ‘#’ for block comment +# E501: line too long +# I202: Additional newline in a section of imports +# +# We also need to specify the rules which are ignored by default: +# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] +FLAKE8_COMMON_ARGS = [ + "--show-source", + "--builtin=gettext", + "--max-complexity=20", + "--import-order-style=google", + "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", + "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", + "--max-line-length=88", +] + + +@nox.session +def lint(session: nox.sessions.Session) -> None: + if not TEST_CONFIG["enforce_type_hints"]: + session.install("flake8", "flake8-import-order") + else: + session.install("flake8", "flake8-import-order", "flake8-annotations") + + local_names = _determine_local_import_names(".") + args = FLAKE8_COMMON_ARGS + [ + "--application-import-names", + ",".join(local_names), + ".", + ] + session.run("flake8", *args) + + +# +# Black +# + + +@nox.session +def blacken(session: nox.sessions.Session) -> None: + session.install(BLACK_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + session.run("black", *python_files) + + +# +# Sample Tests +# + + +PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] + + +def _session_tests( + session: nox.sessions.Session, post_install: Callable = None +) -> None: + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install("-r", 
"requirements-test.txt", "-c", "constraints-test.txt") + else: + session.install("-r", "requirements-test.txt") + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. + # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) + + +@nox.session(python=ALL_VERSIONS) +def py(session: nox.sessions.Session) -> None: + """Runs py.test for a sample using the specified version of Python.""" + if session.python in TESTED_VERSIONS: + _session_tests(session) + else: + session.skip( + "SKIPPED: {} tests are disabled for this sample.".format(session.python) + ) + + +# +# Readmegen +# + + +def _get_repo_root() -> Optional[str]: + """ Returns the root folder of the project. """ + # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
+ p = Path(os.getcwd()) + for i in range(10): + if p is None: + break + if Path(p / ".git").exists(): + return str(p) + # .git is not available in repos cloned via Cloud Build + # setup.py is always in the library's root, so use that instead + # https://github.com/googleapis/synthtool/issues/792 + if Path(p / "setup.py").exists(): + return str(p) + p = p.parent + raise Exception("Unable to detect repository root.") + + +GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")]) + + +@nox.session +@nox.parametrize("path", GENERATED_READMES) +def readmegen(session: nox.sessions.Session, path: str) -> None: + """(Re-)generates the readme for a sample.""" + session.install("jinja2", "pyyaml") + dir_ = os.path.dirname(path) + + if os.path.exists(os.path.join(dir_, "requirements.txt")): + session.install("-r", os.path.join(dir_, "requirements.txt")) + + in_file = os.path.join(dir_, "README.rst.in") + session.run( + "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file + ) diff --git a/packages/db-dtypes/samples/snippets/pandas_date_and_time.py b/packages/db-dtypes/samples/snippets/pandas_date_and_time.py new file mode 100644 index 000000000000..d6b402998333 --- /dev/null +++ b/packages/db-dtypes/samples/snippets/pandas_date_and_time.py @@ -0,0 +1,84 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +def pandas_date_and_time(): + # fmt: off + # [START bigquery_date_create] + + import datetime + import pandas as pd + import db_dtypes # noqa import to register dtypes + + dates = pd.Series( + [datetime.date(2021, 9, 17), '2021-9-18'], + dtype='date') + + # [END bigquery_date_create] + # [START bigquery_date_as_datetime] + + datetimes = dates.astype("datetime64") + + # [END bigquery_date_as_datetime] + # [START bigquery_date_sub] + + dates2 = pd.Series(['2021-1-1', '2021-1-2'], dtype='date') + diffs = dates - dates2 + + # [END bigquery_date_sub] + # [START bigquery_date_do] + + do = pd.DateOffset(days=1) + after = dates + do + before = dates - do + + # [END bigquery_date_do] + # [START bigquery_time_create] + + times = pd.Series( + [datetime.time(1, 2, 3, 456789), '12:00:00.6'], + dtype='time') + + # [END bigquery_time_create] + # [START bigquery_time_as_timedelta] + + timedeltas = times.astype("timedelta64") + + # [END bigquery_time_as_timedelta] + # [START bigquery_combine_date_time] + + combined = datetimes + timedeltas + + # [END bigquery_combine_date_time] + combined0 = combined + # [START bigquery_combine2_date_time] + + combined = dates + times + + # [END bigquery_combine2_date_time] + # fmt: on + + return ( + dates, + datetimes, + dates2, + diffs, + do, + after, + before, + times, + timedeltas, + combined, + combined0, + ) diff --git a/packages/db-dtypes/samples/snippets/pandas_date_and_time_test.py b/packages/db-dtypes/samples/snippets/pandas_date_and_time_test.py new file mode 100644 index 000000000000..b6735c62ebab --- /dev/null +++ b/packages/db-dtypes/samples/snippets/pandas_date_and_time_test.py @@ -0,0 +1,59 @@ +# Copyright 2021 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import datetime + +import numpy as np +from pandas import Timestamp + + +def test_pandas_date_and_time(): + from .pandas_date_and_time import pandas_date_and_time + + ( + dates, + _, + dates2, + diffs, + do, + after, + before, + times, + _, + combined, + combined0, + ) = pandas_date_and_time() + + assert str(dates.dtype) == "date" + assert list(dates) == [datetime.date(2021, 9, 17), datetime.date(2021, 9, 18)] + + assert np.array_equal( + diffs, dates.astype("datetime64") - dates2.astype("datetime64"), + ) + + assert np.array_equal(after, dates.astype("object") + do) + assert np.array_equal(before, dates.astype("object") - do) + + assert str(times.dtype) == "time" + assert list(times) == [ + datetime.time(1, 2, 3, 456789), + datetime.time(12, 0, 0, 600000), + ] + + for c in combined0, combined: + assert str(c.dtype) == "datetime64[ns]" + assert list(c) == [ + Timestamp("2021-09-17 01:02:03.456789"), + Timestamp("2021-09-18 12:00:00.600000"), + ] diff --git a/packages/db-dtypes/samples/snippets/requirements-test.txt b/packages/db-dtypes/samples/snippets/requirements-test.txt new file mode 100644 index 000000000000..927094516e65 --- /dev/null +++ b/packages/db-dtypes/samples/snippets/requirements-test.txt @@ -0,0 +1 @@ +pytest==6.2.5 diff --git a/packages/db-dtypes/samples/snippets/requirements.txt b/packages/db-dtypes/samples/snippets/requirements.txt new file mode 100644 index 000000000000..facdb2715f66 --- /dev/null +++ b/packages/db-dtypes/samples/snippets/requirements.txt @@ -0,0 +1,3 @@ +numpy +pandas +pyarrow diff --git a/packages/db-dtypes/setup.py 
b/packages/db-dtypes/setup.py index 5d41c5aa7f17..cd4accd19a48 100644 --- a/packages/db-dtypes/setup.py +++ b/packages/db-dtypes/setup.py @@ -28,6 +28,13 @@ # 'Development Status :: 5 - Production/Stable' release_status = "Development Status :: 4 - Beta" +dependencies = [ + "packaging >= 14.3", + "pandas", + "pyarrow", + "numpy", +] + package_root = os.path.abspath(os.path.dirname(__file__)) with open(os.path.join(package_root, "db_dtypes", "version.py")) as f: version = re.search('__version__ = "([^"]+)"', f.read()).group(1) @@ -65,6 +72,5 @@ def readme(): platforms="Posix; MacOS X; Windows", install_requires=[], python_requires=">=3.6, <3.10", - tests_require=["packaging", "pytz"], - entry_points={"sqlalchemy.dialects": ["bigquery = db_dtypes:BigQueryDialect"]}, + tests_require=["pytest"], ) diff --git a/packages/db-dtypes/tests/unit/test_dtypes.py b/packages/db-dtypes/tests/unit/test_dtypes.py new file mode 100644 index 000000000000..572915e895f9 --- /dev/null +++ b/packages/db-dtypes/tests/unit/test_dtypes.py @@ -0,0 +1,616 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import datetime + +import packaging.version +import pytest + +pd = pytest.importorskip("pandas") +np = pytest.importorskip("numpy") + +pandas_release = packaging.version.parse(pd.__version__).release + +SAMPLE_RAW_VALUES = dict( + date=(datetime.date(2021, 2, 2), "2021-2-3", None), + time=(datetime.time(1, 2, 2), "1:2:3.5", None), +) +SAMPLE_VALUES = dict( + date=( + datetime.date(2021, 2, 2), + datetime.date(2021, 2, 3), + datetime.date(2021, 2, 4), + datetime.date(2021, 2, 5), + ), + time=( + datetime.time(1, 2, 2), + datetime.time(1, 2, 3, 500000), + datetime.time(1, 2, 4, 500000), + datetime.time(1, 2, 5, 500000), + ), +) +SAMPLE_DT_VALUES = dict( + date=( + "2021-02-02T00:00:00.000000", + "2021-02-03T00:00:00.000000", + "2021-02-04T00:00:00.000000", + "2021-02-05T00:00:00.000000", + ), + time=( + "1970-01-01T01:02:02.000000", + "1970-01-01T01:02:03.500000", + "1970-01-01T01:02:04.500000", + "1970-01-01T01:02:05.500000", + ), +) + +for_date_and_time = pytest.mark.parametrize("dtype", ["date", "time"]) + + +@pytest.fixture(autouse=True) +def register_dtype(): + import db_dtypes # noqa + + +def _cls(dtype): + import db_dtypes + + return getattr(db_dtypes, dtype.capitalize() + "Array") + + +def _make_one(dtype): + return _cls(dtype)._from_sequence(SAMPLE_RAW_VALUES[dtype]) + + +@for_date_and_time +@pytest.mark.parametrize( + "factory_method", [None, "_from_sequence", "_from_sequence_of_strings"] +) +def test_array_construction(dtype, factory_method): + sample_raw_values = SAMPLE_RAW_VALUES[dtype] + factory = _cls(dtype) + if factory_method: + factory = getattr(factory, factory_method) + if factory_method == "_from_sequence_of_strings": + sample_raw_values = [ + str(v) if v is not None else v for v in sample_raw_values + ] + a = factory(sample_raw_values) + assert len(a) == 3 + assert a.size == 3 + assert a.shape == (3,) + sample_values = SAMPLE_VALUES[dtype] + assert a[0], a[1] == sample_values[:2] + assert a[2] is None + + # implementation details: + assert 
a.nbytes == 24 + assert np.array_equal( + a._ndarray + == np.array(SAMPLE_DT_VALUES[dtype][:2] + ("NaT",), dtype="datetime64[us]"), + [True, True, False], + ) + + +@for_date_and_time +def test_array_construction_bad_vaue_type(dtype): + with pytest.raises(TypeError, match="Invalid value type"): + _cls(dtype)._from_sequence([42]) + + +@for_date_and_time +def test_time_series_construction(dtype): + sample_values = SAMPLE_VALUES[dtype] + s = pd.Series(SAMPLE_RAW_VALUES[dtype], dtype=dtype) + assert len(s) == 3 + assert s[0], s[1] == sample_values[:2] + assert s[2] is None + assert s.nbytes == 24 + assert isinstance(s.array, _cls(dtype)) + + +@for_date_and_time +@pytest.mark.parametrize( + "left,op,right,expected", + [ + ([1, 2], "==", [1, 2], [True, True]), + ([1, 2], "==", [1, 3], [True, False]), + ([1, 3], "<=", [1, 2], [True, False]), + ([1, 2], "<=", [1, 3], [True, True]), + ([1, 3], ">=", [1, 2], [True, True]), + ([1, 2], ">=", [1, 3], [True, False]), + ], +) +def test_timearray_comparisons( + dtype, + left, + op, + right, + expected, + comparisons={ + "==": (lambda a, b: a == b), + ">=": (lambda a, b: a >= b), + "<=": (lambda a, b: a <= b), + }, + complements={ + "==": (lambda a, b: a != b), + ">=": (lambda a, b: a < b), + "<=": (lambda a, b: a > b), + }, +): + sample_values = SAMPLE_VALUES[dtype] + left = [sample_values[index] for index in left] + right = [sample_values[index] for index in right] + left = _cls(dtype)._from_sequence(left) + right = _cls(dtype)._from_sequence(right) + right_obs = np.array(list(right)) + expected = np.array(expected) + for r in right, right_obs: + # Note that the right_obs comparisons work because + # they're called on right_obs rather then left, because + # TimeArrays only support comparisons with TimeArrays. 
+ assert np.array_equal(comparisons[op](left, r), expected) + assert np.array_equal(complements[op](left, r), ~expected) + + # Bad shape + for bad_shape in ([], [1, 2, 3]): + if op == "==": + assert not comparisons[op](left, np.array(bad_shape)) + assert complements[op](left, np.array(bad_shape)) + else: + with pytest.raises( + ValueError, match="operands could not be broadcast together", + ): + comparisons[op](left, np.array(bad_shape)) + with pytest.raises( + ValueError, match="operands could not be broadcast together", + ): + complements[op](left, np.array(bad_shape)) + + # Bad items + for bad_items in ( + [1, 2], + [1], # a single-element array gets broadcast + ): + if op == "==": + assert np.array_equal( + comparisons[op](left, np.array(bad_items)), np.array([False, False]) + ) + assert np.array_equal( + complements[op](left, np.array(bad_items)), np.array([True, True]) + ) + else: + # Can't compare orderings times and ints: + with pytest.raises(TypeError, match="not supported"): + comparisons[op](left, np.array(bad_items)) + with pytest.raises(TypeError, match="not supported"): + complements[op](left, np.array(bad_items)) + + +@for_date_and_time +def test___getitem___arrayindex(dtype): + cls = _cls(dtype) + sample_values = SAMPLE_VALUES[dtype] + assert np.array_equal( + cls(sample_values)[[1, 3]], cls([sample_values[1], sample_values[3]]), + ) + + +@for_date_and_time +def test_timearray_slicing(dtype): + a = _make_one(dtype) + b = a[:] + assert b is not a + assert b.__class__ == a.__class__ + assert np.array_equal(b, a) + + sample_values = SAMPLE_VALUES[dtype] + cls = _cls(dtype) + assert np.array_equal(a[:1], cls._from_sequence(sample_values[:1])) + + # Assignment works: + a[:1] = cls._from_sequence([sample_values[2]]) + assert np.array_equal( + a[:2], cls._from_sequence([sample_values[2], sample_values[1]]) + ) + + # Series also work: + s = pd.Series(SAMPLE_RAW_VALUES[dtype], dtype=dtype) + assert np.array_equal(s[:1].array, 
cls._from_sequence([sample_values[0]])) + + +@for_date_and_time +def test_item_assignment(dtype): + a = _make_one(dtype)[:2] + sample_values = SAMPLE_VALUES[dtype] + cls = _cls(dtype) + a[0] = sample_values[2] + assert np.array_equal(a, cls._from_sequence([sample_values[2], sample_values[1]])) + a[1] = None + assert np.array_equal(a, cls._from_sequence([sample_values[2], None])) + + +@for_date_and_time +def test_array_assignment(dtype): + a = _make_one(dtype) + cls = _cls(dtype) + sample_values = SAMPLE_VALUES[dtype] + a[a.isna()] = sample_values[3] + assert np.array_equal(a, cls([sample_values[i] for i in (0, 1, 3)])) + a[[0, 2]] = sample_values[2] + assert np.array_equal(a, cls([sample_values[i] for i in (2, 1, 2)])) + + +@for_date_and_time +def test_repeat(dtype): + cls = _cls(dtype) + sample_values = SAMPLE_VALUES[dtype] + a = cls._from_sequence(sample_values).repeat(3) + assert list(a) == sorted(sample_values * 3) + + +@for_date_and_time +def test_copy(dtype): + cls = _cls(dtype) + sample_values = SAMPLE_VALUES[dtype] + a = cls._from_sequence(sample_values) + b = a.copy() + assert b is not a + assert b._ndarray is not a._ndarray + assert np.array_equal(b, a) + + +@for_date_and_time +def test_from_ndarray_copy(dtype): + cls = _cls(dtype) + sample_values = SAMPLE_VALUES[dtype] + a = cls._from_sequence(sample_values) + b = cls(a._ndarray, copy=True) + assert b._ndarray is not a._ndarray + assert np.array_equal(b, a) + + +@for_date_and_time +def test__from_factorized(dtype): + sample_values = SAMPLE_VALUES[dtype] + a = _cls(dtype)(sample_values * 2) + codes, b = a.factorize() + assert b.__class__ is a.__class__ + assert [b[code] for code in codes] == list(a) + + +@for_date_and_time +def test_isna(dtype): + a = _make_one(dtype) + assert list(a.isna()) == [False, False, True] + + +@for_date_and_time +def test__validate_scalar_invalid(dtype): + with pytest.raises(ValueError): + _make_one(dtype)._validate_scalar("bad") + + +@for_date_and_time 
+@pytest.mark.parametrize( + "allow_fill, fill_value", + [ + (False, None), + (True, None), + (True, pd._libs.NaT if pd else None), + (True, np.NaN if pd else None), + (True, 42), + ], +) +def test_take(dtype, allow_fill, fill_value): + sample_values = SAMPLE_VALUES[dtype] + a = _cls(dtype)(sample_values) + if allow_fill: + if fill_value == 42: + fill_value = expected_fill = ( + datetime.date(1971, 4, 2) + if dtype == "date" + else datetime.time(0, 42, 42, 424242) + ) + else: + expected_fill = None + b = a.take([1, -1, 3], allow_fill=True, fill_value=fill_value) + expect = [sample_values[1], expected_fill, sample_values[3]] + else: + b = a.take([1, -4, 3]) + expect = [sample_values[1], sample_values[-4], sample_values[3]] + + assert list(b) == expect + + +@for_date_and_time +def test_take_bad_index(dtype): + # When allow_fill is set, negative indexes < -1 raise ValueError. + # This is based on testing with an integer series/array. + # The documentation isn't clear on this at all. + sample_values = SAMPLE_VALUES[dtype] + a = _cls(dtype)(sample_values) + with pytest.raises(ValueError): + a.take([1, -2, 3], allow_fill=True, fill_value=None) + + +@for_date_and_time +def test__concat_same_type_via_concat(dtype): + sample_values = SAMPLE_VALUES[dtype] + s1 = pd.Series(sample_values[:2], dtype=dtype) + s2 = pd.Series(sample_values[2:], dtype=dtype) + assert tuple(pd.concat((s1, s2))) == sample_values + + +@for_date_and_time +def test__concat_same_type_not_same_type(dtype): + # Test a dtype-compatibility in _concat_same_type. + # This seems not to be needed in practice, because higher-level + # convatenation code detects multiple dtypes and casts to a common + # type, however, having the check seems hygienic. 
:) + sample_values = SAMPLE_VALUES[dtype] + s1 = pd.Series(sample_values[:2], dtype=dtype) + s2 = pd.Series(sample_values[2:]) + with pytest.raises(ValueError): + s1.array._concat_same_type((s1.array, s2.array)) + + +@for_date_and_time +def test_dropna(dtype): + assert np.array_equal(_make_one(dtype).dropna(), _make_one(dtype)[:2]) + + +@pytest.mark.parametrize( + "value, meth, limit, expect", + [ + (1, None, None, [0, 1, 1, 3]), + ([0, 2, 1, 0], None, None, [0, 2, 1, 3]), + (None, "backfill", None, [0, 3, 3, 3]), + (None, "bfill", None, [0, 3, 3, 3]), + (None, "pad", None, [0, 0, 0, 3]), + (None, "ffill", None, [0, 0, 0, 3]), + (None, "backfill", 1, [0, None, 3, 3]), + (None, "bfill", 1, [0, None, 3, 3]), + (None, "pad", 1, [0, 0, None, 3]), + (None, "ffill", 1, [0, 0, None, 3]), + ], +) +@for_date_and_time +def test_fillna(dtype, value, meth, limit, expect): + cls = _cls(dtype) + sample_values = SAMPLE_VALUES[dtype] + a = cls([sample_values[0], None, None, sample_values[3]]) + if isinstance(value, list): + value = cls([sample_values[i] for i in value]) + elif value is not None: + value = sample_values[value] + expect = cls([None if i is None else sample_values[i] for i in expect]) + assert np.array_equal(a.fillna(value, meth, limit), expect) + + +@for_date_and_time +def test_unique(dtype): + cls = _cls(dtype) + sample_values = SAMPLE_VALUES[dtype] + assert np.array_equal(cls(sample_values * 3).unique(), cls(sample_values),) + + +@for_date_and_time +def test_argsort(dtype): + sample_values = SAMPLE_VALUES[dtype] + s = pd.Series(sample_values * 2, dtype=dtype).argsort() + assert list(s) == [0, 4, 1, 5, 2, 6, 3, 7] + + +@for_date_and_time +def test_astype_copy(dtype): + a = _make_one(dtype) + b = a.astype(a.dtype, copy=True) + assert b is not a + assert b.__class__ is a.__class__ + assert np.array_equal(b, a) + + +@pytest.mark.parametrize( + "dtype, same", + [ + ("= (1, 2): + assert ( + a.median() == datetime.time(1, 2, 4) + if dtype == "time" + else 
datetime.date(2021, 2, 3) + ) + + empty = cls([]) + assert empty.min() is None + assert empty.max() is None + if pandas_release >= (1, 2): + assert empty.median() is None + empty = cls([None]) + assert empty.min() is None + assert empty.max() is None + assert empty.min(skipna=False) is None + assert empty.max(skipna=False) is None + if pandas_release >= (1, 2): + assert empty.median() is None + assert empty.median(skipna=False) is None + + a = _make_one(dtype) + assert a.min() == sample_values[0] + assert a.max() == sample_values[1] + if pandas_release >= (1, 2): + assert ( + a.median() == datetime.time(1, 2, 2, 750000) + if dtype == "time" + else datetime.date(2021, 2, 2) + ) + + +def test_date_add(): + dates = _cls("date")(SAMPLE_VALUES["date"]) + times = _cls("time")(SAMPLE_VALUES["time"]) + expect = dates.astype("datetime64") + times.astype("timedelta64") + + assert np.array_equal(dates + times, expect) + assert np.array_equal(times + dates, expect) + + do = pd.DateOffset(days=1) + expect = dates.astype("object") + do + assert np.array_equal(dates + do, expect) + if pandas_release >= (1, 1): + assert np.array_equal(do + dates, expect) + + with pytest.raises(TypeError): + dates + times.astype("timedelta64") + with pytest.raises(TypeError): + dates + 42 + + # When these are in series, we get handling of differing lengths + # and date offsets for free: + dates = pd.Series(dates) + times = pd.Series(times) + expect = dates.astype("datetime64") + times.astype("timedelta64")[:2] + assert np.array_equal(dates + times[:2], expect, equal_nan=True) + assert np.array_equal(times[:2] + dates, expect, equal_nan=True) + + do = pd.Series([pd.DateOffset(days=i) for i in range(4)]) + expect = dates.astype("object") + do + assert np.array_equal(dates + do, expect) + assert np.array_equal(do + dates, expect) + + +def test_date_sub(): + dates = _cls("date")(SAMPLE_VALUES["date"]) + dates2 = _cls("date")( + ( + datetime.date(2021, 1, 2), + datetime.date(2021, 1, 3), + 
datetime.date(2021, 1, 4), + datetime.date(2021, 1, 5), + ) + ) + expect = dates.astype("datetime64") - dates2.astype("datetime64") + assert np.array_equal(dates - dates2, expect) + + do = pd.DateOffset(days=1) + expect = dates.astype("object") - do + assert np.array_equal(dates - do, expect) + + with pytest.raises(TypeError): + dates - 42 + + # When these are in series, we get handling of differing lengths + # and date offsets for free: + dates = pd.Series(dates) + dates2 = pd.Series(dates2) + expect = dates.astype("datetime64") - dates2.astype("datetime64")[:2] + assert np.array_equal(dates - dates2[:2], expect, equal_nan=True) + + do = pd.Series([pd.DateOffset(days=i) for i in range(4)]) + expect = dates.astype("object") - do + assert np.array_equal(dates - do, expect) From 2c8a0beac2f2b6e9f2cab88fe629d92c2031104a Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Tue, 21 Sep 2021 14:07:05 -0500 Subject: [PATCH 004/210] chore: add header to owlbot config --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 7b6cc31057ef..e57856d16582 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -1,3 +1,17 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest digest: sha256:a3a85c2e0b3293068e47b1635b178f7e3d3845f2cfb8722de6713d4bbafdcd1d From 40e94a23bfcaf485baa8a5fde4c940fee3f71550 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Tue, 21 Sep 2021 14:11:39 -0500 Subject: [PATCH 005/210] chore: add license headers to __init__.py files --- packages/db-dtypes/samples/__init__.py | 13 +++++++++ .../db-dtypes/samples/snippets/__init__.py | 27 +++++++------------ 2 files changed, 23 insertions(+), 17 deletions(-) diff --git a/packages/db-dtypes/samples/__init__.py b/packages/db-dtypes/samples/__init__.py index e69de29bb2d1..7e1ec16ec8cc 100644 --- a/packages/db-dtypes/samples/__init__.py +++ b/packages/db-dtypes/samples/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
diff --git a/packages/db-dtypes/samples/snippets/__init__.py b/packages/db-dtypes/samples/snippets/__init__.py index fa3a9cd632b9..7e1ec16ec8cc 100644 --- a/packages/db-dtypes/samples/snippets/__init__.py +++ b/packages/db-dtypes/samples/snippets/__init__.py @@ -1,20 +1,13 @@ -# Copyright (c) 2021 The sqlalchemy-bigquery Authors +# Copyright 2021 Google LLC # -# Permission is hereby granted, free of charge, to any person obtaining a copy of -# this software and associated documentation files (the "Software"), to deal in -# the Software without restriction, including without limitation the rights to -# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -# the Software, and to permit persons to whom the Software is furnished to do so, -# subject to the following conditions: +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at # -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. +# http://www.apache.org/licenses/LICENSE-2.0 # -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR -# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -__version__ = "1.0.0-a1" +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. From 13e5b849bc227c7931e75afc5473c6c3310351e4 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Thu, 23 Sep 2021 12:20:51 -0500 Subject: [PATCH 006/210] test: cleanup test sessions (#1) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * test: disable system test session * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * add pandas to intersphinx * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * actually use dependencies * add constraints * fix conflict with numpy * bump minimum packaging Co-authored-by: Owl Bot --- .../.github/ISSUE_TEMPLATE/bug_report.md | 2 +- .../.github/PULL_REQUEST_TEMPLATE.md | 2 +- .../db-dtypes/.github/header-checker-lint.yml | 14 ++-- packages/db-dtypes/.kokoro/build.sh | 2 +- .../db-dtypes/.kokoro/continuous/common.cfg | 4 +- .../.kokoro/continuous/continuous.cfg | 2 +- packages/db-dtypes/.kokoro/docs/common.cfg | 6 +- .../db-dtypes/.kokoro/docs/docs-presubmit.cfg | 2 +- packages/db-dtypes/.kokoro/docs/docs.cfg | 2 +- .../db-dtypes/.kokoro/presubmit/common.cfg | 4 +- .../db-dtypes/.kokoro/presubmit/presubmit.cfg | 2 +- packages/db-dtypes/.kokoro/release.sh | 2 +- packages/db-dtypes/.kokoro/release/common.cfg | 4 +- .../db-dtypes/.kokoro/release/release.cfg | 2 +- .../db-dtypes/.kokoro/samples/lint/common.cfg | 4 +- .../.kokoro/samples/lint/continuous.cfg | 2 +- .../.kokoro/samples/lint/periodic.cfg | 2 +- .../.kokoro/samples/lint/presubmit.cfg | 2 +- .../.kokoro/samples/python3.6/common.cfg | 4 +- .../.kokoro/samples/python3.6/continuous.cfg | 1 + .../samples/python3.6/periodic-head.cfg | 2 +- .../.kokoro/samples/python3.6/periodic.cfg | 2 +- 
.../.kokoro/samples/python3.6/presubmit.cfg | 2 +- .../.kokoro/samples/python3.7/common.cfg | 4 +- .../.kokoro/samples/python3.7/continuous.cfg | 2 +- .../samples/python3.7/periodic-head.cfg | 2 +- .../.kokoro/samples/python3.7/periodic.cfg | 2 +- .../.kokoro/samples/python3.7/presubmit.cfg | 2 +- .../.kokoro/samples/python3.8/common.cfg | 4 +- .../.kokoro/samples/python3.8/continuous.cfg | 2 +- .../samples/python3.8/periodic-head.cfg | 2 +- .../.kokoro/samples/python3.8/periodic.cfg | 2 +- .../.kokoro/samples/python3.8/presubmit.cfg | 2 +- .../.kokoro/samples/python3.9/common.cfg | 4 +- .../.kokoro/samples/python3.9/continuous.cfg | 2 +- .../samples/python3.9/periodic-head.cfg | 2 +- .../.kokoro/samples/python3.9/periodic.cfg | 2 +- .../.kokoro/samples/python3.9/presubmit.cfg | 2 +- .../.kokoro/test-samples-against-head.sh | 2 +- packages/db-dtypes/.kokoro/test-samples.sh | 2 +- packages/db-dtypes/.kokoro/trampoline.sh | 2 +- packages/db-dtypes/CODE_OF_CONDUCT.md | 2 +- packages/db-dtypes/CONTRIBUTING.rst | 28 +++---- packages/db-dtypes/db_dtypes/version.py | 2 +- .../db-dtypes/docs/_templates/layout.html | 4 +- packages/db-dtypes/docs/conf.py | 3 +- packages/db-dtypes/noxfile.py | 83 ++----------------- packages/db-dtypes/owlbot.py | 4 +- .../scripts/readme-gen/readme_gen.py | 20 ++--- .../readme-gen/templates/README.tmpl.rst | 2 +- packages/db-dtypes/setup.cfg | 8 -- packages/db-dtypes/setup.py | 10 +-- packages/db-dtypes/testing/.gitignore | 2 +- .../db-dtypes/testing/constraints-3.6.txt | 8 +- .../db-dtypes/testing/constraints-3.7.txt | 1 + .../db-dtypes/testing/constraints-3.8.txt | 2 +- 56 files changed, 106 insertions(+), 186 deletions(-) diff --git a/packages/db-dtypes/.github/ISSUE_TEMPLATE/bug_report.md b/packages/db-dtypes/.github/ISSUE_TEMPLATE/bug_report.md index 864681c10f70..9cf101f8b340 100644 --- a/packages/db-dtypes/.github/ISSUE_TEMPLATE/bug_report.md +++ b/packages/db-dtypes/.github/ISSUE_TEMPLATE/bug_report.md @@ -10,7 +10,7 @@ Thanks for 
stopping by to let us know something could be better! Please run down the following list and make sure you've tried the usual "quick fixes": - - Search the issues already opened: https://github.com/googleapis/python-bigquery-sqlalchemy/issues + - Search the issues already opened: https://github.com/googleapis/python-db-dtypes-pandas/issues - Search StackOverflow: https://stackoverflow.com/questions/tagged/google-cloud-platform+python If you are still having issues, please be sure to include as much information as possible: diff --git a/packages/db-dtypes/.github/PULL_REQUEST_TEMPLATE.md b/packages/db-dtypes/.github/PULL_REQUEST_TEMPLATE.md index 029dc14b7ca7..710f27b75a8c 100644 --- a/packages/db-dtypes/.github/PULL_REQUEST_TEMPLATE.md +++ b/packages/db-dtypes/.github/PULL_REQUEST_TEMPLATE.md @@ -1,5 +1,5 @@ Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly: -- [ ] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/python-bigquery-sqlalchemy/issues/new/choose) before writing your code! That way we can discuss the change, evaluate designs, and agree on the general idea +- [ ] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/python-db-dtypes-pandas/issues/new/choose) before writing your code! 
That way we can discuss the change, evaluate designs, and agree on the general idea - [ ] Ensure the tests and linter pass - [ ] Code coverage does not decrease (if any source code was changed) - [ ] Appropriate docs were updated (if necessary) diff --git a/packages/db-dtypes/.github/header-checker-lint.yml b/packages/db-dtypes/.github/header-checker-lint.yml index 3058bec338cb..6fe78aa7987a 100644 --- a/packages/db-dtypes/.github/header-checker-lint.yml +++ b/packages/db-dtypes/.github/header-checker-lint.yml @@ -2,14 +2,14 @@ "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"], "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt", "**/__init__.py", "samples/**/constraints.txt", "samples/**/constraints-test.txt"], "sourceFileExtensions": [ - "ts", - "js", - "java", - "sh", - "Dockerfile", - "yaml", + "ts", + "js", + "java", + "sh", + "Dockerfile", + "yaml", "py", "html", "txt" ] -} +} \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/build.sh b/packages/db-dtypes/.kokoro/build.sh index 2a2874e59f66..9f80c3478aa8 100755 --- a/packages/db-dtypes/.kokoro/build.sh +++ b/packages/db-dtypes/.kokoro/build.sh @@ -16,7 +16,7 @@ set -eo pipefail if [[ -z "${PROJECT_ROOT:-}" ]]; then - PROJECT_ROOT="github/python-bigquery-sqlalchemy" + PROJECT_ROOT="github/python-db-dtypes-pandas" fi cd "${PROJECT_ROOT}" diff --git a/packages/db-dtypes/.kokoro/continuous/common.cfg b/packages/db-dtypes/.kokoro/continuous/common.cfg index 8c42ee6dc949..b40f00c767c6 100644 --- a/packages/db-dtypes/.kokoro/continuous/common.cfg +++ b/packages/db-dtypes/.kokoro/continuous/common.cfg @@ -14,7 +14,7 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" # Use the trampoline script to run in docker. -build_file: "python-bigquery-sqlalchemy/.kokoro/trampoline.sh" +build_file: "python-db-dtypes-pandas/.kokoro/trampoline.sh" # Configure the docker image for kokoro-trampoline. 
env_vars: { @@ -23,5 +23,5 @@ env_vars: { } env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery-sqlalchemy/.kokoro/build.sh" + value: "github/python-db-dtypes-pandas/.kokoro/build.sh" } diff --git a/packages/db-dtypes/.kokoro/continuous/continuous.cfg b/packages/db-dtypes/.kokoro/continuous/continuous.cfg index 18a4c35325b8..8f43917d92fe 100644 --- a/packages/db-dtypes/.kokoro/continuous/continuous.cfg +++ b/packages/db-dtypes/.kokoro/continuous/continuous.cfg @@ -1 +1 @@ -# Format: //devtools/kokoro/config/proto/build.proto +# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/docs/common.cfg b/packages/db-dtypes/.kokoro/docs/common.cfg index a10adabc0685..abd7ad73d60d 100644 --- a/packages/db-dtypes/.kokoro/docs/common.cfg +++ b/packages/db-dtypes/.kokoro/docs/common.cfg @@ -11,7 +11,7 @@ action { gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-bigquery-sqlalchemy/.kokoro/trampoline_v2.sh" +build_file: "python-db-dtypes-pandas/.kokoro/trampoline_v2.sh" # Configure the docker image for kokoro-trampoline. 
env_vars: { @@ -20,7 +20,7 @@ env_vars: { } env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery-sqlalchemy/.kokoro/publish-docs.sh" + value: "github/python-db-dtypes-pandas/.kokoro/publish-docs.sh" } env_vars: { @@ -62,4 +62,4 @@ before_action { keyname: "docuploader_service_account" } } -} +} \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/docs/docs-presubmit.cfg b/packages/db-dtypes/.kokoro/docs/docs-presubmit.cfg index 84d915515829..05d5574fc945 100644 --- a/packages/db-dtypes/.kokoro/docs/docs-presubmit.cfg +++ b/packages/db-dtypes/.kokoro/docs/docs-presubmit.cfg @@ -18,7 +18,7 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery-sqlalchemy/.kokoro/build.sh" + value: "github/python-db-dtypes-pandas/.kokoro/build.sh" } # Only run this nox session. diff --git a/packages/db-dtypes/.kokoro/docs/docs.cfg b/packages/db-dtypes/.kokoro/docs/docs.cfg index 18a4c35325b8..8f43917d92fe 100644 --- a/packages/db-dtypes/.kokoro/docs/docs.cfg +++ b/packages/db-dtypes/.kokoro/docs/docs.cfg @@ -1 +1 @@ -# Format: //devtools/kokoro/config/proto/build.proto +# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/presubmit/common.cfg b/packages/db-dtypes/.kokoro/presubmit/common.cfg index 8c42ee6dc949..b40f00c767c6 100644 --- a/packages/db-dtypes/.kokoro/presubmit/common.cfg +++ b/packages/db-dtypes/.kokoro/presubmit/common.cfg @@ -14,7 +14,7 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" # Use the trampoline script to run in docker. -build_file: "python-bigquery-sqlalchemy/.kokoro/trampoline.sh" +build_file: "python-db-dtypes-pandas/.kokoro/trampoline.sh" # Configure the docker image for kokoro-trampoline. 
env_vars: { @@ -23,5 +23,5 @@ env_vars: { } env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery-sqlalchemy/.kokoro/build.sh" + value: "github/python-db-dtypes-pandas/.kokoro/build.sh" } diff --git a/packages/db-dtypes/.kokoro/presubmit/presubmit.cfg b/packages/db-dtypes/.kokoro/presubmit/presubmit.cfg index 18a4c35325b8..8f43917d92fe 100644 --- a/packages/db-dtypes/.kokoro/presubmit/presubmit.cfg +++ b/packages/db-dtypes/.kokoro/presubmit/presubmit.cfg @@ -1 +1 @@ -# Format: //devtools/kokoro/config/proto/build.proto +# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/release.sh b/packages/db-dtypes/.kokoro/release.sh index 75b7532fe70d..297fec580330 100755 --- a/packages/db-dtypes/.kokoro/release.sh +++ b/packages/db-dtypes/.kokoro/release.sh @@ -27,6 +27,6 @@ export PYTHONUNBUFFERED=1 # Move into the package, build the distribution and upload. TWINE_PASSWORD=$(cat "${KOKORO_GFILE_DIR}/secret_manager/google-cloud-pypi-token") -cd github/python-bigquery-sqlalchemy +cd github/python-db-dtypes-pandas python3 setup.py sdist bdist_wheel twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/packages/db-dtypes/.kokoro/release/common.cfg b/packages/db-dtypes/.kokoro/release/common.cfg index 8f3be12645db..5917933f727e 100644 --- a/packages/db-dtypes/.kokoro/release/common.cfg +++ b/packages/db-dtypes/.kokoro/release/common.cfg @@ -11,7 +11,7 @@ action { gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-bigquery-sqlalchemy/.kokoro/trampoline.sh" +build_file: "python-db-dtypes-pandas/.kokoro/trampoline.sh" # Configure the docker image for kokoro-trampoline. 
env_vars: { @@ -20,7 +20,7 @@ env_vars: { } env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery-sqlalchemy/.kokoro/release.sh" + value: "github/python-db-dtypes-pandas/.kokoro/release.sh" } # Tokens needed to report release status back to GitHub diff --git a/packages/db-dtypes/.kokoro/release/release.cfg b/packages/db-dtypes/.kokoro/release/release.cfg index 18a4c35325b8..8f43917d92fe 100644 --- a/packages/db-dtypes/.kokoro/release/release.cfg +++ b/packages/db-dtypes/.kokoro/release/release.cfg @@ -1 +1 @@ -# Format: //devtools/kokoro/config/proto/build.proto +# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/lint/common.cfg b/packages/db-dtypes/.kokoro/samples/lint/common.cfg index 35ebc5d2663e..51b06c3ae843 100644 --- a/packages/db-dtypes/.kokoro/samples/lint/common.cfg +++ b/packages/db-dtypes/.kokoro/samples/lint/common.cfg @@ -15,7 +15,7 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery-sqlalchemy/.kokoro/test-samples.sh" + value: "github/python-db-dtypes-pandas/.kokoro/test-samples.sh" } # Configure the docker image for kokoro-trampoline. @@ -31,4 +31,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. 
-build_file: "python-bigquery-sqlalchemy/.kokoro/trampoline.sh" +build_file: "python-db-dtypes-pandas/.kokoro/trampoline.sh" \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/lint/continuous.cfg b/packages/db-dtypes/.kokoro/samples/lint/continuous.cfg index b196817872e9..a1c8d9759c88 100644 --- a/packages/db-dtypes/.kokoro/samples/lint/continuous.cfg +++ b/packages/db-dtypes/.kokoro/samples/lint/continuous.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "True" -} +} \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/lint/periodic.cfg b/packages/db-dtypes/.kokoro/samples/lint/periodic.cfg index 71cd1e597e38..50fec9649732 100644 --- a/packages/db-dtypes/.kokoro/samples/lint/periodic.cfg +++ b/packages/db-dtypes/.kokoro/samples/lint/periodic.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "False" -} +} \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/lint/presubmit.cfg b/packages/db-dtypes/.kokoro/samples/lint/presubmit.cfg index b196817872e9..a1c8d9759c88 100644 --- a/packages/db-dtypes/.kokoro/samples/lint/presubmit.cfg +++ b/packages/db-dtypes/.kokoro/samples/lint/presubmit.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "True" -} +} \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/python3.6/common.cfg b/packages/db-dtypes/.kokoro/samples/python3.6/common.cfg index 2ff740aad0a8..f5d0d5e11448 100644 --- a/packages/db-dtypes/.kokoro/samples/python3.6/common.cfg +++ b/packages/db-dtypes/.kokoro/samples/python3.6/common.cfg @@ -21,7 +21,7 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery-sqlalchemy/.kokoro/test-samples.sh" + value: "github/python-db-dtypes-pandas/.kokoro/test-samples.sh" } # Configure the docker image for kokoro-trampoline. 
@@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-bigquery-sqlalchemy/.kokoro/trampoline.sh" +build_file: "python-db-dtypes-pandas/.kokoro/trampoline.sh" \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/python3.6/continuous.cfg b/packages/db-dtypes/.kokoro/samples/python3.6/continuous.cfg index b196817872e9..7218af1499e5 100644 --- a/packages/db-dtypes/.kokoro/samples/python3.6/continuous.cfg +++ b/packages/db-dtypes/.kokoro/samples/python3.6/continuous.cfg @@ -4,3 +4,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "True" } + diff --git a/packages/db-dtypes/.kokoro/samples/python3.6/periodic-head.cfg b/packages/db-dtypes/.kokoro/samples/python3.6/periodic-head.cfg index abf3481d14dd..ee3d56408db9 100644 --- a/packages/db-dtypes/.kokoro/samples/python3.6/periodic-head.cfg +++ b/packages/db-dtypes/.kokoro/samples/python3.6/periodic-head.cfg @@ -7,5 +7,5 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery-sqlalchemy/.kokoro/test-samples-against-head.sh" + value: "github/python-db-dtypes-pandas/.kokoro/test-samples-against-head.sh" } diff --git a/packages/db-dtypes/.kokoro/samples/python3.6/periodic.cfg b/packages/db-dtypes/.kokoro/samples/python3.6/periodic.cfg index 71cd1e597e38..50fec9649732 100644 --- a/packages/db-dtypes/.kokoro/samples/python3.6/periodic.cfg +++ b/packages/db-dtypes/.kokoro/samples/python3.6/periodic.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "False" -} +} \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/python3.6/presubmit.cfg b/packages/db-dtypes/.kokoro/samples/python3.6/presubmit.cfg index b196817872e9..a1c8d9759c88 100644 --- a/packages/db-dtypes/.kokoro/samples/python3.6/presubmit.cfg +++ 
b/packages/db-dtypes/.kokoro/samples/python3.6/presubmit.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "True" -} +} \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/python3.7/common.cfg b/packages/db-dtypes/.kokoro/samples/python3.7/common.cfg index e5444b7774b4..86401fadc1bb 100644 --- a/packages/db-dtypes/.kokoro/samples/python3.7/common.cfg +++ b/packages/db-dtypes/.kokoro/samples/python3.7/common.cfg @@ -21,7 +21,7 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery-sqlalchemy/.kokoro/test-samples.sh" + value: "github/python-db-dtypes-pandas/.kokoro/test-samples.sh" } # Configure the docker image for kokoro-trampoline. @@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-bigquery-sqlalchemy/.kokoro/trampoline.sh" +build_file: "python-db-dtypes-pandas/.kokoro/trampoline.sh" \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/python3.7/continuous.cfg b/packages/db-dtypes/.kokoro/samples/python3.7/continuous.cfg index b196817872e9..a1c8d9759c88 100644 --- a/packages/db-dtypes/.kokoro/samples/python3.7/continuous.cfg +++ b/packages/db-dtypes/.kokoro/samples/python3.7/continuous.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "True" -} +} \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/python3.7/periodic-head.cfg b/packages/db-dtypes/.kokoro/samples/python3.7/periodic-head.cfg index abf3481d14dd..ee3d56408db9 100644 --- a/packages/db-dtypes/.kokoro/samples/python3.7/periodic-head.cfg +++ b/packages/db-dtypes/.kokoro/samples/python3.7/periodic-head.cfg @@ -7,5 +7,5 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery-sqlalchemy/.kokoro/test-samples-against-head.sh" + value: 
"github/python-db-dtypes-pandas/.kokoro/test-samples-against-head.sh" } diff --git a/packages/db-dtypes/.kokoro/samples/python3.7/periodic.cfg b/packages/db-dtypes/.kokoro/samples/python3.7/periodic.cfg index 71cd1e597e38..50fec9649732 100644 --- a/packages/db-dtypes/.kokoro/samples/python3.7/periodic.cfg +++ b/packages/db-dtypes/.kokoro/samples/python3.7/periodic.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "False" -} +} \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/python3.7/presubmit.cfg b/packages/db-dtypes/.kokoro/samples/python3.7/presubmit.cfg index b196817872e9..a1c8d9759c88 100644 --- a/packages/db-dtypes/.kokoro/samples/python3.7/presubmit.cfg +++ b/packages/db-dtypes/.kokoro/samples/python3.7/presubmit.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "True" -} +} \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/python3.8/common.cfg b/packages/db-dtypes/.kokoro/samples/python3.8/common.cfg index 1cfbee76caa5..e461be16d4f9 100644 --- a/packages/db-dtypes/.kokoro/samples/python3.8/common.cfg +++ b/packages/db-dtypes/.kokoro/samples/python3.8/common.cfg @@ -21,7 +21,7 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery-sqlalchemy/.kokoro/test-samples.sh" + value: "github/python-db-dtypes-pandas/.kokoro/test-samples.sh" } # Configure the docker image for kokoro-trampoline. @@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. 
-build_file: "python-bigquery-sqlalchemy/.kokoro/trampoline.sh" +build_file: "python-db-dtypes-pandas/.kokoro/trampoline.sh" \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/python3.8/continuous.cfg b/packages/db-dtypes/.kokoro/samples/python3.8/continuous.cfg index b196817872e9..a1c8d9759c88 100644 --- a/packages/db-dtypes/.kokoro/samples/python3.8/continuous.cfg +++ b/packages/db-dtypes/.kokoro/samples/python3.8/continuous.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "True" -} +} \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/python3.8/periodic-head.cfg b/packages/db-dtypes/.kokoro/samples/python3.8/periodic-head.cfg index abf3481d14dd..ee3d56408db9 100644 --- a/packages/db-dtypes/.kokoro/samples/python3.8/periodic-head.cfg +++ b/packages/db-dtypes/.kokoro/samples/python3.8/periodic-head.cfg @@ -7,5 +7,5 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery-sqlalchemy/.kokoro/test-samples-against-head.sh" + value: "github/python-db-dtypes-pandas/.kokoro/test-samples-against-head.sh" } diff --git a/packages/db-dtypes/.kokoro/samples/python3.8/periodic.cfg b/packages/db-dtypes/.kokoro/samples/python3.8/periodic.cfg index 71cd1e597e38..50fec9649732 100644 --- a/packages/db-dtypes/.kokoro/samples/python3.8/periodic.cfg +++ b/packages/db-dtypes/.kokoro/samples/python3.8/periodic.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "False" -} +} \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/python3.8/presubmit.cfg b/packages/db-dtypes/.kokoro/samples/python3.8/presubmit.cfg index b196817872e9..a1c8d9759c88 100644 --- a/packages/db-dtypes/.kokoro/samples/python3.8/presubmit.cfg +++ b/packages/db-dtypes/.kokoro/samples/python3.8/presubmit.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "True" -} +} \ No newline at end of file diff --git 
a/packages/db-dtypes/.kokoro/samples/python3.9/common.cfg b/packages/db-dtypes/.kokoro/samples/python3.9/common.cfg index cb8e45689a71..7d3c9417364e 100644 --- a/packages/db-dtypes/.kokoro/samples/python3.9/common.cfg +++ b/packages/db-dtypes/.kokoro/samples/python3.9/common.cfg @@ -21,7 +21,7 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery-sqlalchemy/.kokoro/test-samples.sh" + value: "github/python-db-dtypes-pandas/.kokoro/test-samples.sh" } # Configure the docker image for kokoro-trampoline. @@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-bigquery-sqlalchemy/.kokoro/trampoline.sh" +build_file: "python-db-dtypes-pandas/.kokoro/trampoline.sh" \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/python3.9/continuous.cfg b/packages/db-dtypes/.kokoro/samples/python3.9/continuous.cfg index b196817872e9..a1c8d9759c88 100644 --- a/packages/db-dtypes/.kokoro/samples/python3.9/continuous.cfg +++ b/packages/db-dtypes/.kokoro/samples/python3.9/continuous.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "True" -} +} \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/python3.9/periodic-head.cfg b/packages/db-dtypes/.kokoro/samples/python3.9/periodic-head.cfg index abf3481d14dd..ee3d56408db9 100644 --- a/packages/db-dtypes/.kokoro/samples/python3.9/periodic-head.cfg +++ b/packages/db-dtypes/.kokoro/samples/python3.9/periodic-head.cfg @@ -7,5 +7,5 @@ env_vars: { env_vars: { key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery-sqlalchemy/.kokoro/test-samples-against-head.sh" + value: "github/python-db-dtypes-pandas/.kokoro/test-samples-against-head.sh" } diff --git a/packages/db-dtypes/.kokoro/samples/python3.9/periodic.cfg 
b/packages/db-dtypes/.kokoro/samples/python3.9/periodic.cfg index 71cd1e597e38..50fec9649732 100644 --- a/packages/db-dtypes/.kokoro/samples/python3.9/periodic.cfg +++ b/packages/db-dtypes/.kokoro/samples/python3.9/periodic.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "False" -} +} \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/python3.9/presubmit.cfg b/packages/db-dtypes/.kokoro/samples/python3.9/presubmit.cfg index b196817872e9..a1c8d9759c88 100644 --- a/packages/db-dtypes/.kokoro/samples/python3.9/presubmit.cfg +++ b/packages/db-dtypes/.kokoro/samples/python3.9/presubmit.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "True" -} +} \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/test-samples-against-head.sh b/packages/db-dtypes/.kokoro/test-samples-against-head.sh index a62db989762e..9380cfbbc1cf 100755 --- a/packages/db-dtypes/.kokoro/test-samples-against-head.sh +++ b/packages/db-dtypes/.kokoro/test-samples-against-head.sh @@ -23,6 +23,6 @@ set -eo pipefail # Enables `**` to include files nested inside sub-folders shopt -s globstar -cd github/python-bigquery-sqlalchemy +cd github/python-db-dtypes-pandas exec .kokoro/test-samples-impl.sh diff --git a/packages/db-dtypes/.kokoro/test-samples.sh b/packages/db-dtypes/.kokoro/test-samples.sh index f7713a95f09f..347c6146e067 100755 --- a/packages/db-dtypes/.kokoro/test-samples.sh +++ b/packages/db-dtypes/.kokoro/test-samples.sh @@ -24,7 +24,7 @@ set -eo pipefail # Enables `**` to include files nested inside sub-folders shopt -s globstar -cd github/python-bigquery-sqlalchemy +cd github/python-db-dtypes-pandas # Run periodic samples tests at latest release if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then diff --git a/packages/db-dtypes/.kokoro/trampoline.sh b/packages/db-dtypes/.kokoro/trampoline.sh index a4241db23f41..f39236e943a8 100755 --- a/packages/db-dtypes/.kokoro/trampoline.sh +++ 
b/packages/db-dtypes/.kokoro/trampoline.sh @@ -25,4 +25,4 @@ function cleanup() { trap cleanup EXIT $(dirname $0)/populate-secrets.sh # Secret Manager secrets. -python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" +python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" \ No newline at end of file diff --git a/packages/db-dtypes/CODE_OF_CONDUCT.md b/packages/db-dtypes/CODE_OF_CONDUCT.md index 84ff396aef58..039f43681204 100644 --- a/packages/db-dtypes/CODE_OF_CONDUCT.md +++ b/packages/db-dtypes/CODE_OF_CONDUCT.md @@ -92,4 +92,4 @@ harassment or threats to anyone's safety, we may take action without notice. This Code of Conduct is adapted from the Contributor Covenant, version 1.4, available at -https://www.contributor-covenant.org/version/1/4/code-of-conduct.html +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/db-dtypes/CONTRIBUTING.rst b/packages/db-dtypes/CONTRIBUTING.rst index 172f9dfccb74..f066db35b18b 100644 --- a/packages/db-dtypes/CONTRIBUTING.rst +++ b/packages/db-dtypes/CONTRIBUTING.rst @@ -35,21 +35,21 @@ Using a Development Checkout You'll have to create a development environment using a Git checkout: - While logged into your GitHub account, navigate to the - ``python-bigquery-sqlalchemy`` `repo`_ on GitHub. + ``python-db-dtypes-pandas`` `repo`_ on GitHub. -- Fork and clone the ``python-bigquery-sqlalchemy`` repository to your GitHub account by +- Fork and clone the ``python-db-dtypes-pandas`` repository to your GitHub account by clicking the "Fork" button. -- Clone your fork of ``python-bigquery-sqlalchemy`` from your GitHub account to your local +- Clone your fork of ``python-db-dtypes-pandas`` from your GitHub account to your local computer, substituting your account username and specifying the destination - as ``hack-on-python-bigquery-sqlalchemy``. E.g.:: + as ``hack-on-python-db-dtypes-pandas``. 
E.g.:: $ cd ${HOME} - $ git clone git@github.com:USERNAME/python-bigquery-sqlalchemy.git hack-on-python-bigquery-sqlalchemy - $ cd hack-on-python-bigquery-sqlalchemy - # Configure remotes such that you can pull changes from the googleapis/python-bigquery-sqlalchemy + $ git clone git@github.com:USERNAME/python-db-dtypes-pandas.git hack-on-python-db-dtypes-pandas + $ cd hack-on-python-db-dtypes-pandas + # Configure remotes such that you can pull changes from the googleapis/python-db-dtypes-pandas # repository into your local repository. - $ git remote add upstream git@github.com:googleapis/python-bigquery-sqlalchemy.git + $ git remote add upstream git@github.com:googleapis/python-db-dtypes-pandas.git # fetch and merge changes from upstream into main $ git fetch upstream $ git merge upstream/main @@ -60,7 +60,7 @@ repo, from which you can submit a pull request. To work on the codebase and run the tests, we recommend using ``nox``, but you can also use a ``virtualenv`` of your own creation. -.. _repo: https://github.com/googleapis/python-bigquery-sqlalchemy +.. _repo: https://github.com/googleapis/python-db-dtypes-pandas Using ``nox`` ============= @@ -113,7 +113,7 @@ Coding Style export GOOGLE_CLOUD_TESTING_BRANCH="main" By doing this, you are specifying the location of the most up-to-date - version of ``python-bigquery-sqlalchemy``. The + version of ``python-db-dtypes-pandas``. The remote name ``upstream`` should point to the official ``googleapis`` checkout and the branch should be the default branch on that remote (``main``). @@ -143,12 +143,12 @@ Running System Tests $ nox -s system # Run a single system test - $ nox -s system-3.9 -- -k + $ nox -s system-3.8 -- -k .. note:: - System tests are only configured to run under Python 3.8 and 3.9. + System tests are only configured to run under Python 3.8. For expediency, we do not run them in older versions of Python 3. This alone will not run the tests. 
You'll need to change some local @@ -209,7 +209,7 @@ The `description on PyPI`_ for the project comes directly from the ``README``. Due to the reStructuredText (``rst``) parser used by PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` instead of -``https://github.com/googleapis/python-bigquery-sqlalchemy/blob/main/CONTRIBUTING.rst``) +``https://github.com/googleapis/python-db-dtypes-pandas/blob/main/CONTRIBUTING.rst``) may cause problems creating links or rendering the description. .. _description on PyPI: https://pypi.org/project/db-dtypes @@ -234,7 +234,7 @@ We support: Supported versions can be found in our ``noxfile.py`` `config`_. -.. _config: https://github.com/googleapis/python-bigquery-sqlalchemy/blob/main/noxfile.py +.. _config: https://github.com/googleapis/python-db-dtypes-pandas/blob/main/noxfile.py We also explicitly decided to support Python 3 beginning with version 3.6. diff --git a/packages/db-dtypes/db_dtypes/version.py b/packages/db-dtypes/db_dtypes/version.py index 450268d22912..49866fc2ae28 100644 --- a/packages/db-dtypes/db_dtypes/version.py +++ b/packages/db-dtypes/db_dtypes/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "0.0.1" +__version__ = "0.0.1a2" diff --git a/packages/db-dtypes/docs/_templates/layout.html b/packages/db-dtypes/docs/_templates/layout.html index 95e9c77fcfe1..6316a537f72b 100644 --- a/packages/db-dtypes/docs/_templates/layout.html +++ b/packages/db-dtypes/docs/_templates/layout.html @@ -20,8 +20,8 @@ {% endblock %}
-
- As of January 1, 2020 this library no longer supports Python 2 on the latest released version. +
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version. Library versions released prior to that date will continue to be available. For more information please visit Python 2 support on Google Cloud.
diff --git a/packages/db-dtypes/docs/conf.py b/packages/db-dtypes/docs/conf.py index e9c68d4daf59..9482b9748266 100644 --- a/packages/db-dtypes/docs/conf.py +++ b/packages/db-dtypes/docs/conf.py @@ -156,7 +156,7 @@ html_theme_options = { "description": "Google Cloud Client Libraries for db-dtypes", "github_user": "googleapis", - "github_repo": "python-bigquery-sqlalchemy", + "github_repo": "python-db-dtypes-pandas", "github_banner": True, "font_family": "'Roboto', Georgia, sans", "head_font_family": "'Roboto', Georgia, serif", @@ -351,6 +351,7 @@ "grpc": ("https://grpc.github.io/grpc/python/", None), "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), + "pandas": ("http://pandas.pydata.org/pandas-docs/dev", None), } diff --git a/packages/db-dtypes/noxfile.py b/packages/db-dtypes/noxfile.py index 1e88b21b857f..63ac4349e3c1 100644 --- a/packages/db-dtypes/noxfile.py +++ b/packages/db-dtypes/noxfile.py @@ -28,9 +28,7 @@ BLACK_PATHS = ["docs", "db_dtypes", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" - -# We're using two Python versions to test with sqlalchemy 1.3 and 1.4. -SYSTEM_TEST_PYTHON_VERSIONS = ["3.8", "3.9"] +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -40,8 +38,6 @@ "lint", "unit", "cover", - "system", - "compliance", "lint_setup_py", "blacken", "docs", @@ -98,13 +94,7 @@ def default(session): constraints_path, ) - if session.python == "3.8": - extras = "[tests,alembic]" - elif session.python == "3.9": - extras = "[tests,geography]" - else: - extras = "[tests]" - session.install("-e", f".{extras}", "-c", constraints_path) + session.install("-e", ".", "-c", constraints_path) # Run py.test against the unit tests. 
session.run( @@ -156,13 +146,7 @@ def system(session): # Install all test dependencies, then install this package into the # virtualenv's dist-packages. session.install("mock", "pytest", "google-cloud-testutils", "-c", constraints_path) - if session.python == "3.8": - extras = "[tests,alembic]" - elif session.python == "3.9": - extras = "[tests,geography]" - else: - extras = "[tests]" - session.install("-e", f".{extras}", "-c", constraints_path) + session.install("-e", ".", "-c", constraints_path) # Run py.test against the system tests. if system_test_exists: @@ -183,56 +167,6 @@ def system(session): ) -@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) -def compliance(session): - """Run the SQLAlchemy dialect-compliance system tests""" - constraints_path = str( - CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" - ) - system_test_folder_path = os.path.join("tests", "sqlalchemy_dialect_compliance") - - if os.environ.get("RUN_COMPLIANCE_TESTS", "true") == "false": - session.skip("RUN_COMPLIANCE_TESTS is set to false, skipping") - if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): - session.skip("Credentials must be set via environment variable") - if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": - session.install("pyopenssl") - if not os.path.exists(system_test_folder_path): - session.skip("Compliance tests were not found") - - session.install("--pre", "grpcio") - - session.install( - "mock", - "pytest", - "pytest-rerunfailures", - "google-cloud-testutils", - "-c", - constraints_path, - ) - if session.python == "3.8": - extras = "[tests,alembic]" - elif session.python == "3.9": - extras = "[tests,geography]" - else: - extras = "[tests]" - session.install("-e", f".{extras}", "-c", constraints_path) - - session.run( - "py.test", - "-vv", - f"--junitxml=compliance_{session.python}_sponge_log.xml", - "--reruns=3", - "--reruns-delay=60", - "--only-rerun=403 Exceeded rate limits", - "--only-rerun=409 Already Exists", - 
"--only-rerun=404 Not found", - "--only-rerun=400 Cannot execute DML over a non-existent table", - system_test_folder_path, - *session.posargs, - ) - - @nox.session(python=DEFAULT_PYTHON_VERSION) def cover(session): """Run the final coverage report. @@ -251,9 +185,7 @@ def docs(session): """Build the docs for this library.""" session.install("-e", ".") - session.install( - "sphinx==4.0.1", "alabaster", "geoalchemy2", "shapely", "recommonmark" - ) + session.install("sphinx==4.0.1", "alabaster", "recommonmark") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( @@ -276,12 +208,7 @@ def docfx(session): session.install("-e", ".") session.install( - "sphinx==4.0.1", - "alabaster", - "geoalchemy2", - "shapely", - "recommonmark", - "gcp-sphinx-docfx-yaml", + "sphinx==4.0.1", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml" ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) diff --git a/packages/db-dtypes/owlbot.py b/packages/db-dtypes/owlbot.py index 5acefc945037..dd1ed69fb597 100644 --- a/packages/db-dtypes/owlbot.py +++ b/packages/db-dtypes/owlbot.py @@ -31,6 +31,7 @@ unit_test_python_versions=["3.6", "3.7", "3.8", "3.9"], system_test_python_versions=["3.8"], cov_level=100, + intersphinx_dependencies={"pandas": "http://pandas.pydata.org/pandas-docs/dev"}, ) s.move(templated_files, excludes=["docs/multiprocessing.rst"]) @@ -65,6 +66,7 @@ def place_before(path, text, *before_text, escape=None): "nox.options.stop_on_first_error = True", ) +# There are no system tests for this package. 
old_sessions = """ "unit", "system", @@ -76,8 +78,6 @@ def place_before(path, text, *before_text, escape=None): "lint", "unit", "cover", - "system", - "compliance", """ s.replace(["noxfile.py"], old_sessions, new_sessions) diff --git a/packages/db-dtypes/scripts/readme-gen/readme_gen.py b/packages/db-dtypes/scripts/readme-gen/readme_gen.py index d0cc15825097..d309d6e97518 100644 --- a/packages/db-dtypes/scripts/readme-gen/readme_gen.py +++ b/packages/db-dtypes/scripts/readme-gen/readme_gen.py @@ -28,21 +28,19 @@ jinja_env = jinja2.Environment( trim_blocks=True, loader=jinja2.FileSystemLoader( - os.path.abspath(os.path.join(os.path.dirname(__file__), "templates")) - ), -) + os.path.abspath(os.path.join(os.path.dirname(__file__), 'templates')))) -README_TMPL = jinja_env.get_template("README.tmpl.rst") +README_TMPL = jinja_env.get_template('README.tmpl.rst') def get_help(file): - return subprocess.check_output(["python", file, "--help"]).decode() + return subprocess.check_output(['python', file, '--help']).decode() def main(): parser = argparse.ArgumentParser() - parser.add_argument("source") - parser.add_argument("--destination", default="README.rst") + parser.add_argument('source') + parser.add_argument('--destination', default='README.rst') args = parser.parse_args() @@ -50,9 +48,9 @@ def main(): root = os.path.dirname(source) destination = os.path.join(root, args.destination) - jinja_env.globals["get_help"] = get_help + jinja_env.globals['get_help'] = get_help - with io.open(source, "r") as f: + with io.open(source, 'r') as f: config = yaml.load(f) # This allows get_help to execute in the right directory. 
@@ -60,9 +58,9 @@ def main(): output = README_TMPL.render(config) - with io.open(destination, "w") as f: + with io.open(destination, 'w') as f: f.write(output) -if __name__ == "__main__": +if __name__ == '__main__': main() diff --git a/packages/db-dtypes/scripts/readme-gen/templates/README.tmpl.rst b/packages/db-dtypes/scripts/readme-gen/templates/README.tmpl.rst index 30ad03d050d8..4fd239765b0a 100644 --- a/packages/db-dtypes/scripts/readme-gen/templates/README.tmpl.rst +++ b/packages/db-dtypes/scripts/readme-gen/templates/README.tmpl.rst @@ -84,4 +84,4 @@ to `browse the source`_ and `report issues`_. {% endif %} -.. _Google Cloud SDK: https://cloud.google.com/sdk/ +.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/packages/db-dtypes/setup.cfg b/packages/db-dtypes/setup.cfg index bf8e3853ba9c..c3a2b39f6528 100644 --- a/packages/db-dtypes/setup.cfg +++ b/packages/db-dtypes/setup.cfg @@ -17,11 +17,3 @@ # Generated by synthtool. DO NOT EDIT! [bdist_wheel] universal = 1 - -[sqla_testing] -requirement_cls=db_dtypes.requirements:Requirements -profile_file=.sqlalchemy_dialect_compliance-profiles.txt - -[tool:pytest] -addopts= --tb native -v -r fxX -p no:warnings -python_files=tests/*test_*.py diff --git a/packages/db-dtypes/setup.py b/packages/db-dtypes/setup.py index cd4accd19a48..8e1e3558882d 100644 --- a/packages/db-dtypes/setup.py +++ b/packages/db-dtypes/setup.py @@ -29,10 +29,10 @@ release_status = "Development Status :: 4 - Beta" dependencies = [ - "packaging >= 14.3", - "pandas", - "pyarrow", - "numpy", + "packaging >= 17.0", + "pandas >= 0.24.2, < 2.0dev", + "pyarrow >= 3.0.0, < 6.0dev", + "numpy >= 1.16.6, < 2.0dev", ] package_root = os.path.abspath(os.path.dirname(__file__)) @@ -70,7 +70,7 @@ def readme(): "Topic :: Database :: Front-Ends", ], platforms="Posix; MacOS X; Windows", - install_requires=[], + install_requires=dependencies, python_requires=">=3.6, <3.10", tests_require=["pytest"], ) diff --git 
a/packages/db-dtypes/testing/.gitignore b/packages/db-dtypes/testing/.gitignore index 40f47fa771e9..b05fbd630881 100644 --- a/packages/db-dtypes/testing/.gitignore +++ b/packages/db-dtypes/testing/.gitignore @@ -1,3 +1,3 @@ test-env.sh service-account.json -client-secrets.json +client-secrets.json \ No newline at end of file diff --git a/packages/db-dtypes/testing/constraints-3.6.txt b/packages/db-dtypes/testing/constraints-3.6.txt index 60421130e299..fd89d90a2e16 100644 --- a/packages/db-dtypes/testing/constraints-3.6.txt +++ b/packages/db-dtypes/testing/constraints-3.6.txt @@ -4,7 +4,7 @@ # Pin the version to the lower bound. # # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", -sqlalchemy==1.2.0 -google-auth==1.25.0 -google-cloud-bigquery==2.25.2 -google-api-core==1.30.0 +packaging==17.0 +pandas==0.24.2 +pyarrow==3.0.0 +numpy==1.16.6 diff --git a/packages/db-dtypes/testing/constraints-3.7.txt b/packages/db-dtypes/testing/constraints-3.7.txt index e69de29bb2d1..684864f2bcde 100644 --- a/packages/db-dtypes/testing/constraints-3.7.txt +++ b/packages/db-dtypes/testing/constraints-3.7.txt @@ -0,0 +1 @@ +pandas==1.1.0 diff --git a/packages/db-dtypes/testing/constraints-3.8.txt b/packages/db-dtypes/testing/constraints-3.8.txt index 4884f96af3ef..3fd8886e64d1 100644 --- a/packages/db-dtypes/testing/constraints-3.8.txt +++ b/packages/db-dtypes/testing/constraints-3.8.txt @@ -1 +1 @@ -sqlalchemy==1.3.24 +pandas==1.2.0 From 06ceb6dadfc4f67a4c6b085529cdc8fb6b285a4e Mon Sep 17 00:00:00 2001 From: Jim Fulton Date: Mon, 27 Sep 2021 11:21:38 -0400 Subject: [PATCH 007/210] fix: support Pandas 0.24 (#8) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * test: Don't use the equal_nan option of array_equal. 
It requires new versions of numpy * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * blacken * fix: support pandas 0.24 * blacken * remove 'stop on first error' * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot Co-authored-by: Tim Swast --- packages/db-dtypes/db_dtypes/core.py | 9 ++------- packages/db-dtypes/noxfile.py | 1 - packages/db-dtypes/owlbot.py | 15 --------------- packages/db-dtypes/tests/unit/test_dtypes.py | 12 +++++++++--- 4 files changed, 11 insertions(+), 26 deletions(-) diff --git a/packages/db-dtypes/db_dtypes/core.py b/packages/db-dtypes/db_dtypes/core.py index dd1d23ad5890..3b05ad6d5e01 100644 --- a/packages/db-dtypes/db_dtypes/core.py +++ b/packages/db-dtypes/db_dtypes/core.py @@ -17,7 +17,6 @@ import numpy import pandas from pandas._libs import NaT -from pandas._typing import Scalar import pandas.compat.numpy.function import pandas.core.algorithms import pandas.core.arrays @@ -171,18 +170,14 @@ def all( result = pandas.core.nanops.nanall(self._ndarray, axis=axis, skipna=skipna) return result - def min( - self, *, axis: Optional[int] = None, skipna: bool = True, **kwargs - ) -> Scalar: + def min(self, *, axis: Optional[int] = None, skipna: bool = True, **kwargs): pandas.compat.numpy.function.validate_min((), kwargs) result = pandas.core.nanops.nanmin( values=self._ndarray, axis=axis, mask=self.isna(), skipna=skipna ) return self._box_func(result) - def max( - self, *, axis: Optional[int] = None, skipna: bool = True, **kwargs - ) -> Scalar: + def max(self, *, axis: Optional[int] = None, skipna: bool = True, **kwargs): pandas.compat.numpy.function.validate_max((), kwargs) result = pandas.core.nanops.nanmax( values=self._ndarray, axis=axis, mask=self.isna(), skipna=skipna diff 
--git a/packages/db-dtypes/noxfile.py b/packages/db-dtypes/noxfile.py index 63ac4349e3c1..f2a2bedf0abe 100644 --- a/packages/db-dtypes/noxfile.py +++ b/packages/db-dtypes/noxfile.py @@ -44,7 +44,6 @@ ] # Error if a python version is missing -nox.options.stop_on_first_error = True nox.options.error_on_missing_interpreters = True diff --git a/packages/db-dtypes/owlbot.py b/packages/db-dtypes/owlbot.py index dd1ed69fb597..176e179e6c91 100644 --- a/packages/db-dtypes/owlbot.py +++ b/packages/db-dtypes/owlbot.py @@ -51,21 +51,6 @@ ["noxfile.py"], "google/cloud", "db_dtypes", ) - -def place_before(path, text, *before_text, escape=None): - replacement = "\n".join(before_text) + "\n" + text - if escape: - for c in escape: - text = text.replace(c, "\\" + c) - s.replace([path], text, replacement) - - -place_before( - "noxfile.py", - "nox.options.error_on_missing_interpreters = True", - "nox.options.stop_on_first_error = True", -) - # There are no system tests for this package. old_sessions = """ "unit", diff --git a/packages/db-dtypes/tests/unit/test_dtypes.py b/packages/db-dtypes/tests/unit/test_dtypes.py index 572915e895f9..499163978b38 100644 --- a/packages/db-dtypes/tests/unit/test_dtypes.py +++ b/packages/db-dtypes/tests/unit/test_dtypes.py @@ -58,6 +58,12 @@ for_date_and_time = pytest.mark.parametrize("dtype", ["date", "time"]) +def eq_na(a1, a2): + nna1 = pd.notna(a1) + nna2 = pd.notna(a2) + return np.array_equal(nna1, nna2) and np.array_equal(a1[nna1], a2[nna2]) + + @pytest.fixture(autouse=True) def register_dtype(): import db_dtypes # noqa @@ -575,8 +581,8 @@ def test_date_add(): dates = pd.Series(dates) times = pd.Series(times) expect = dates.astype("datetime64") + times.astype("timedelta64")[:2] - assert np.array_equal(dates + times[:2], expect, equal_nan=True) - assert np.array_equal(times[:2] + dates, expect, equal_nan=True) + assert eq_na(dates + times[:2], expect) + assert eq_na(times[:2] + dates, expect) do = pd.Series([pd.DateOffset(days=i) for i in 
range(4)]) expect = dates.astype("object") + do @@ -609,7 +615,7 @@ def test_date_sub(): dates = pd.Series(dates) dates2 = pd.Series(dates2) expect = dates.astype("datetime64") - dates2.astype("datetime64")[:2] - assert np.array_equal(dates - dates2[:2], expect, equal_nan=True) + assert eq_na(dates - dates2[:2], expect) do = pd.Series([pd.DateOffset(days=i) for i in range(4)]) expect = dates.astype("object") - do From 7ae7c344973c63485ac61010a6e772d18610293a Mon Sep 17 00:00:00 2001 From: Jim Fulton Date: Tue, 28 Sep 2021 17:56:16 -0400 Subject: [PATCH 008/210] test: No warnings (#9) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly: - [x] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/python-db-dtypes-pandas/issues/new/choose) before writing your code! That way we can discuss the change, evaluate designs, and agree on the general idea - [x] Ensure the tests and linter pass - [x] Code coverage does not decrease (if any source code was changed) - [x] Appropriate docs were updated (if necessary) Fixes #2 🦕 This builds-on/includes #8, which builds-on/includes #7. 
--- packages/db-dtypes/db_dtypes/__init__.py | 49 ++++++++-- packages/db-dtypes/db_dtypes/core.py | 6 ++ .../db-dtypes/db_dtypes/pandas_backports.py | 11 ++- .../db-dtypes/testing/constraints-3.6.txt | 1 + .../db-dtypes/testing/constraints-3.7.txt | 1 + .../db-dtypes/testing/constraints-3.8.txt | 1 + packages/db-dtypes/tests/unit/test_dtypes.py | 90 +++++++++++++++---- 7 files changed, 136 insertions(+), 23 deletions(-) diff --git a/packages/db-dtypes/db_dtypes/__init__.py b/packages/db-dtypes/db_dtypes/__init__.py index 8a58666e7f52..9495f0c29a3f 100644 --- a/packages/db-dtypes/db_dtypes/__init__.py +++ b/packages/db-dtypes/db_dtypes/__init__.py @@ -16,8 +16,10 @@ """ import datetime +import re import numpy +import packaging.version import pandas import pandas.compat.numpy.function import pandas.core.algorithms @@ -35,6 +37,8 @@ date_dtype_name = "date" time_dtype_name = "time" +pandas_release = packaging.version.parse(pandas.__version__).release + @pandas.core.dtypes.dtypes.register_extension_dtype class TimeDtype(core.BaseDatetimeDtype): @@ -61,15 +65,33 @@ class TimeArray(core.BaseDatetimeArray): _npepoch = numpy.datetime64(_epoch) @classmethod - def _datetime(cls, scalar): + def _datetime( + cls, + scalar, + match_fn=re.compile( + r"\s*(?P\d+)(?::(?P\d+)(?::(?P\d+(?:[.]\d+)?)?)?)?\s*$" + ).match, + ): if isinstance(scalar, datetime.time): return datetime.datetime.combine(cls._epoch, scalar) elif isinstance(scalar, str): # iso string - h, m, s = map(float, scalar.split(":")) - s, us = divmod(s, 1) + match = match_fn(scalar) + if not match: + raise ValueError(f"Bad time string: {repr(scalar)}") + + hour = match.group("hour") + minute = match.group("minute") + second = match.group("second") + second, microsecond = divmod(float(second if second else 0), 1) return datetime.datetime( - 1970, 1, 1, int(h), int(m), int(s), int(us * 1000000) + 1970, + 1, + 1, + int(hour), + int(minute if minute else 0), + int(second), + int(microsecond * 1_000_000), ) else: raise 
TypeError("Invalid value type", scalar) @@ -96,6 +118,11 @@ def astype(self, dtype, copy=True): else: return super().astype(dtype, copy=copy) + if pandas_release < (1,): + + def to_numpy(self, dtype="object"): + return self.astype(dtype) + def __arrow_array__(self, type=None): return pyarrow.array( self.to_numpy(), type=type if type is not None else pyarrow.time64("ns"), @@ -125,12 +152,20 @@ class DateArray(core.BaseDatetimeArray): dtype = DateDtype() @staticmethod - def _datetime(scalar): + def _datetime( + scalar, + match_fn=re.compile(r"\s*(?P\d+)-(?P\d+)-(?P\d+)\s*$").match, + ): if isinstance(scalar, datetime.date): return datetime.datetime(scalar.year, scalar.month, scalar.day) elif isinstance(scalar, str): - # iso string - return datetime.datetime(*map(int, scalar.split("-"))) + match = match_fn(scalar) + if not match: + raise ValueError(f"Bad date string: {repr(scalar)}") + year = int(match.group("year")) + month = int(match.group("month")) + day = int(match.group("day")) + return datetime.datetime(year, month, day) else: raise TypeError("Invalid value type", scalar) diff --git a/packages/db-dtypes/db_dtypes/core.py b/packages/db-dtypes/db_dtypes/core.py index 3b05ad6d5e01..fbc784efe993 100644 --- a/packages/db-dtypes/db_dtypes/core.py +++ b/packages/db-dtypes/db_dtypes/core.py @@ -84,6 +84,12 @@ def astype(self, dtype, copy=True): return super().astype(dtype, copy=copy) def _cmp_method(self, other, op): + oshape = getattr(other, "shape", None) + if oshape != self.shape and oshape != (1,) and self.shape != (1,): + raise TypeError( + "Can't compare arrays with different shapes", self.shape, oshape + ) + if type(other) != type(self): return NotImplemented return op(self._ndarray, other._ndarray) diff --git a/packages/db-dtypes/db_dtypes/pandas_backports.py b/packages/db-dtypes/db_dtypes/pandas_backports.py index bfeb148b9dea..003224f34536 100644 --- a/packages/db-dtypes/db_dtypes/pandas_backports.py +++ b/packages/db-dtypes/db_dtypes/pandas_backports.py @@ 
-31,8 +31,17 @@ def import_default(module_name, force=False, default=None): + """ + Provide an implementation for a class or function when it can't be imported + + or when force is True. + + This is used to replicate Pandas APIs that are missing or insufficient + (thus the force option) in early pandas versions. + """ + if default is None: - return lambda func: import_default(module_name, force, func) + return lambda func_or_class: import_default(module_name, force, func_or_class) if force: return default diff --git a/packages/db-dtypes/testing/constraints-3.6.txt b/packages/db-dtypes/testing/constraints-3.6.txt index fd89d90a2e16..a7388cdc2344 100644 --- a/packages/db-dtypes/testing/constraints-3.6.txt +++ b/packages/db-dtypes/testing/constraints-3.6.txt @@ -5,6 +5,7 @@ # # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", packaging==17.0 +# Make sure we test with pandas 0.24.2. The Python version isn't that relevant. pandas==0.24.2 pyarrow==3.0.0 numpy==1.16.6 diff --git a/packages/db-dtypes/testing/constraints-3.7.txt b/packages/db-dtypes/testing/constraints-3.7.txt index 684864f2bcde..0b3b3097c5df 100644 --- a/packages/db-dtypes/testing/constraints-3.7.txt +++ b/packages/db-dtypes/testing/constraints-3.7.txt @@ -1 +1,2 @@ +# Make sure we test with pandas 1.1.0. The Python version isn't that relevant. pandas==1.1.0 diff --git a/packages/db-dtypes/testing/constraints-3.8.txt b/packages/db-dtypes/testing/constraints-3.8.txt index 3fd8886e64d1..2e7f3549a6f6 100644 --- a/packages/db-dtypes/testing/constraints-3.8.txt +++ b/packages/db-dtypes/testing/constraints-3.8.txt @@ -1 +1,2 @@ +# Make sure we test with pandas 1.2.0. The Python version isn't that relevant. 
pandas==1.2.0 diff --git a/packages/db-dtypes/tests/unit/test_dtypes.py b/packages/db-dtypes/tests/unit/test_dtypes.py index 499163978b38..eca3a317a047 100644 --- a/packages/db-dtypes/tests/unit/test_dtypes.py +++ b/packages/db-dtypes/tests/unit/test_dtypes.py @@ -15,6 +15,7 @@ import datetime import packaging.version +import pyarrow.lib import pytest pd = pytest.importorskip("pandas") @@ -171,18 +172,14 @@ def test_timearray_comparisons( # Bad shape for bad_shape in ([], [1, 2, 3]): - if op == "==": - assert not comparisons[op](left, np.array(bad_shape)) - assert complements[op](left, np.array(bad_shape)) - else: - with pytest.raises( - ValueError, match="operands could not be broadcast together", - ): - comparisons[op](left, np.array(bad_shape)) - with pytest.raises( - ValueError, match="operands could not be broadcast together", - ): - complements[op](left, np.array(bad_shape)) + with pytest.raises( + TypeError, match="Can't compare arrays with different shapes" + ): + comparisons[op](left, np.array(bad_shape)) + with pytest.raises( + TypeError, match="Can't compare arrays with different shapes" + ): + complements[op](left, np.array(bad_shape)) # Bad items for bad_items in ( @@ -478,8 +475,10 @@ def test_asdatetime(dtype, same): ) def test_astimedelta(dtype): t = "01:02:03.123456" - expect = pd.to_timedelta([t]).array.astype( - "timedelta64[ns]" if dtype == "timedelta" else dtype + expect = ( + pd.to_timedelta([t]) + .to_numpy() + .astype("timedelta64[ns]" if dtype == "timedelta" else dtype) ) a = _cls("time")([t, None]) @@ -543,7 +542,10 @@ def test_min_max_median(dtype): assert empty.min(skipna=False) is None assert empty.max(skipna=False) is None if pandas_release >= (1, 2): - assert empty.median() is None + with pytest.warns(RuntimeWarning, match="empty slice"): + # It's weird that we get the warning here, and not + # below. 
:/ + assert empty.median() is None assert empty.median(skipna=False) is None a = _make_one(dtype) @@ -620,3 +622,61 @@ def test_date_sub(): do = pd.Series([pd.DateOffset(days=i) for i in range(4)]) expect = dates.astype("object") - do assert np.array_equal(dates - do, expect) + + +@pytest.mark.parametrize( + "value, expected", [("1", datetime.time(1)), ("1:2", datetime.time(1, 2))], +) +def test_short_time_parsing(value, expected): + assert _cls("time")([value])[0] == expected + + +@pytest.mark.parametrize( + "value, error", + [ + ("thursday", "Bad time string: 'thursday'"), + ("1:2:3thursday", "Bad time string: '1:2:3thursday'"), + ("1:2:3:4", "Bad time string: '1:2:3:4'"), + ("1:2:3.f", "Bad time string: '1:2:3.f'"), + ("1:d:3", "Bad time string: '1:d:3'"), + ("1:2.3", "Bad time string: '1:2.3'"), + ("", "Bad time string: ''"), + ("1:2:99", "second must be in 0[.][.]59"), + ("1:99", "minute must be in 0[.][.]59"), + ("99", "hour must be in 0[.][.]23"), + ], +) +def test_bad_time_parsing(value, error): + with pytest.raises(ValueError, match=error): + _cls("time")([value]) + + +@pytest.mark.parametrize( + "value, error", + [ + ("thursday", "Bad date string: 'thursday'"), + ("1-2-thursday", "Bad date string: '1-2-thursday'"), + ("1-2-3-4", "Bad date string: '1-2-3-4'"), + ("1-2-3.f", "Bad date string: '1-2-3.f'"), + ("1-d-3", "Bad date string: '1-d-3'"), + ("1-3", "Bad date string: '1-3'"), + ("1", "Bad date string: '1'"), + ("", "Bad date string: ''"), + ("2021-2-99", "day is out of range for month"), + ("2021-99-1", "month must be in 1[.][.]12"), + ("10000-1-1", "year 10000 is out of range"), + ], +) +def test_bad_date_parsing(value, error): + with pytest.raises(ValueError, match=error): + _cls("date")([value]) + + +@for_date_and_time +def test_date___arrow__array__(dtype): + a = _make_one(dtype) + ar = a.__arrow_array__() + assert isinstance( + ar, pyarrow.Date32Array if dtype == "date" else pyarrow.Time64Array, + ) + assert [v.as_py() for v in ar] == list(a) 
From 345a4e81d95102d9413292609d82c041ae7706ad Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Wed, 29 Sep 2021 11:31:55 -0500 Subject: [PATCH 009/210] fix: support converting empty `time` Series to pyarrow Array (#11) * fix: support converting empty `time` Series to pyarrow Array * use object dtype for time numpy array * backport to_numpy * remove redundant test --- packages/db-dtypes/db_dtypes/__init__.py | 3 +- packages/db-dtypes/tests/unit/test_arrow.py | 163 +++++++++++++++++++ packages/db-dtypes/tests/unit/test_dtypes.py | 11 -- 3 files changed, 165 insertions(+), 12 deletions(-) create mode 100644 packages/db-dtypes/tests/unit/test_arrow.py diff --git a/packages/db-dtypes/db_dtypes/__init__.py b/packages/db-dtypes/db_dtypes/__init__.py index 9495f0c29a3f..b4a43a3966d5 100644 --- a/packages/db-dtypes/db_dtypes/__init__.py +++ b/packages/db-dtypes/db_dtypes/__init__.py @@ -125,7 +125,8 @@ def to_numpy(self, dtype="object"): def __arrow_array__(self, type=None): return pyarrow.array( - self.to_numpy(), type=type if type is not None else pyarrow.time64("ns"), + self.to_numpy(dtype="object"), + type=type if type is not None else pyarrow.time64("ns"), ) diff --git a/packages/db-dtypes/tests/unit/test_arrow.py b/packages/db-dtypes/tests/unit/test_arrow.py new file mode 100644 index 000000000000..dd0aed78791f --- /dev/null +++ b/packages/db-dtypes/tests/unit/test_arrow.py @@ -0,0 +1,163 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import datetime as dt + +import pandas +import pyarrow +import pytest + +# To register the types. +import db_dtypes # noqa + + +@pytest.mark.parametrize( + ("series", "expected"), + ( + (pandas.Series([], dtype="date"), pyarrow.array([], type=pyarrow.date32())), + ( + pandas.Series([None, None, None], dtype="date"), + pyarrow.array([None, None, None], type=pyarrow.date32()), + ), + ( + pandas.Series( + [dt.date(2021, 9, 27), None, dt.date(2011, 9, 27)], dtype="date" + ), + pyarrow.array( + [dt.date(2021, 9, 27), None, dt.date(2011, 9, 27)], + type=pyarrow.date32(), + ), + ), + ( + pandas.Series( + [dt.date(1677, 9, 22), dt.date(1970, 1, 1), dt.date(2262, 4, 11)], + dtype="date", + ), + pyarrow.array( + [dt.date(1677, 9, 22), dt.date(1970, 1, 1), dt.date(2262, 4, 11)], + type=pyarrow.date32(), + ), + ), + (pandas.Series([], dtype="time"), pyarrow.array([], type=pyarrow.time64("ns"))), + ( + pandas.Series([None, None, None], dtype="time"), + pyarrow.array([None, None, None], type=pyarrow.time64("ns")), + ), + ( + pandas.Series( + [dt.time(0, 0, 0, 0), None, dt.time(23, 59, 59, 999_999)], dtype="time" + ), + pyarrow.array( + [dt.time(0, 0, 0, 0), None, dt.time(23, 59, 59, 999_999)], + type=pyarrow.time64("ns"), + ), + ), + ( + pandas.Series( + [ + dt.time(0, 0, 0, 0), + dt.time(12, 30, 15, 125_000), + dt.time(23, 59, 59, 999_999), + ], + dtype="time", + ), + pyarrow.array( + [ + dt.time(0, 0, 0, 0), + dt.time(12, 30, 15, 125_000), + dt.time(23, 59, 59, 999_999), + ], + type=pyarrow.time64("ns"), + ), + ), + ), +) +def test_to_arrow(series, expected): + array = pyarrow.array(series) + assert array.equals(expected) + + +@pytest.mark.parametrize( + ("series", "expected"), + ( + (pandas.Series([], dtype="date"), pyarrow.array([], type=pyarrow.date64())), + ( + pandas.Series([None, None, None], dtype="date"), + pyarrow.array([None, None, None], type=pyarrow.date64()), + ), + ( + pandas.Series( + [dt.date(2021, 9, 27), None, dt.date(2011, 9, 27)], dtype="date" + ), + 
pyarrow.array( + [dt.date(2021, 9, 27), None, dt.date(2011, 9, 27)], + type=pyarrow.date64(), + ), + ), + ( + pandas.Series( + [dt.date(1677, 9, 22), dt.date(1970, 1, 1), dt.date(2262, 4, 11)], + dtype="date", + ), + pyarrow.array( + [dt.date(1677, 9, 22), dt.date(1970, 1, 1), dt.date(2262, 4, 11)], + type=pyarrow.date64(), + ), + ), + (pandas.Series([], dtype="time"), pyarrow.array([], type=pyarrow.time32("ms"))), + ( + pandas.Series([None, None, None], dtype="time"), + pyarrow.array([None, None, None], type=pyarrow.time32("ms")), + ), + ( + pandas.Series( + [dt.time(0, 0, 0, 0), None, dt.time(23, 59, 59, 999_000)], dtype="time" + ), + pyarrow.array( + [dt.time(0, 0, 0, 0), None, dt.time(23, 59, 59, 999_000)], + type=pyarrow.time32("ms"), + ), + ), + ( + pandas.Series( + [dt.time(0, 0, 0, 0), None, dt.time(23, 59, 59, 999_999)], dtype="time" + ), + pyarrow.array( + [dt.time(0, 0, 0, 0), None, dt.time(23, 59, 59, 999_999)], + type=pyarrow.time64("us"), + ), + ), + ( + pandas.Series( + [ + dt.time(0, 0, 0, 0), + dt.time(12, 30, 15, 125_000), + dt.time(23, 59, 59, 999_999), + ], + dtype="time", + ), + pyarrow.array( + [ + dt.time(0, 0, 0, 0), + dt.time(12, 30, 15, 125_000), + dt.time(23, 59, 59, 999_999), + ], + type=pyarrow.time64("us"), + ), + ), + ), +) +def test_to_arrow_w_arrow_type(series, expected): + array = pyarrow.array(series, type=expected.type) + assert array.equals(expected) diff --git a/packages/db-dtypes/tests/unit/test_dtypes.py b/packages/db-dtypes/tests/unit/test_dtypes.py index eca3a317a047..118458ea695c 100644 --- a/packages/db-dtypes/tests/unit/test_dtypes.py +++ b/packages/db-dtypes/tests/unit/test_dtypes.py @@ -15,7 +15,6 @@ import datetime import packaging.version -import pyarrow.lib import pytest pd = pytest.importorskip("pandas") @@ -670,13 +669,3 @@ def test_bad_time_parsing(value, error): def test_bad_date_parsing(value, error): with pytest.raises(ValueError, match=error): _cls("date")([value]) - - -@for_date_and_time -def 
test_date___arrow__array__(dtype): - a = _make_one(dtype) - ar = a.__arrow_array__() - assert isinstance( - ar, pyarrow.Date32Array if dtype == "date" else pyarrow.Time64Array, - ) - assert [v.as_py() for v in ar] == list(a) From 95f64fd788f671e5c719511c4c422d62ba5556e7 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 29 Sep 2021 16:40:49 +0000 Subject: [PATCH 010/210] chore: release 0.1.0 (#4) :robot: I have created a release \*beep\* \*boop\* --- ## 0.1.0 (2021-09-29) ### Features * add `time` and `date` dtypes ([f104171](https://www.github.com/googleapis/python-db-dtypes-pandas/commit/f10417111642e8f5f4b9af790367af930d15a056)) ### Bug Fixes * support converting empty `time` Series to pyarrow Array ([#11](https://www.github.com/googleapis/python-db-dtypes-pandas/issues/11)) ([7675b15](https://www.github.com/googleapis/python-db-dtypes-pandas/commit/7675b157feb842628fa731cc6a472aa9e6b92903)) * support Pandas 0.24 ([#8](https://www.github.com/googleapis/python-db-dtypes-pandas/issues/8)) ([e996883](https://www.github.com/googleapis/python-db-dtypes-pandas/commit/e996883bc9c76fe5f593e9c19a9d2a1c13501f5e)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). 
--- packages/db-dtypes/CHANGELOG.md | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/packages/db-dtypes/CHANGELOG.md b/packages/db-dtypes/CHANGELOG.md index e59584268120..e8b14ab7076e 100644 --- a/packages/db-dtypes/CHANGELOG.md +++ b/packages/db-dtypes/CHANGELOG.md @@ -1,5 +1,14 @@ # Changelog -[db-dtypes PyPI History][2] +## 0.1.0 (2021-09-29) -[2]: https://pypi.org/project/db-dtypes/#history + +### Features + +* add `time` and `date` dtypes ([f104171](https://www.github.com/googleapis/python-db-dtypes-pandas/commit/f10417111642e8f5f4b9af790367af930d15a056)) + + +### Bug Fixes + +* support converting empty `time` Series to pyarrow Array ([#11](https://www.github.com/googleapis/python-db-dtypes-pandas/issues/11)) ([7675b15](https://www.github.com/googleapis/python-db-dtypes-pandas/commit/7675b157feb842628fa731cc6a472aa9e6b92903)) +* support Pandas 0.24 ([#8](https://www.github.com/googleapis/python-db-dtypes-pandas/issues/8)) ([e996883](https://www.github.com/googleapis/python-db-dtypes-pandas/commit/e996883bc9c76fe5f593e9c19a9d2a1c13501f5e)) From 6f3cfb4dd1d497821cdde67f8e7b3d0c57e88ebf Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Wed, 29 Sep 2021 12:39:56 -0500 Subject: [PATCH 011/210] chore: fix version number (#12) --- packages/db-dtypes/db_dtypes/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/db-dtypes/db_dtypes/version.py b/packages/db-dtypes/db_dtypes/version.py index 49866fc2ae28..a906ae65b9b2 100644 --- a/packages/db-dtypes/db_dtypes/version.py +++ b/packages/db-dtypes/db_dtypes/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "0.0.1a2" +__version__ = "0.1.0" From 8d7e93c597613a084c899e83351bafccf9852b6c Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 30 Sep 2021 15:44:45 +0000 Subject: [PATCH 012/210] chore: fail samples nox session if python version is missing (#17) --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 16 +--------------- packages/db-dtypes/samples/snippets/noxfile.py | 4 ++++ 2 files changed, 5 insertions(+), 15 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index e57856d16582..ae6c57fad807 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -1,17 +1,3 @@ -# Copyright 2021 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:a3a85c2e0b3293068e47b1635b178f7e3d3845f2cfb8722de6713d4bbafdcd1d + digest: sha256:82b12321da4446a73cb11bcb6812fbec8c105abda3946d46e6394e5fbfb64c0f diff --git a/packages/db-dtypes/samples/snippets/noxfile.py b/packages/db-dtypes/samples/snippets/noxfile.py index b008613f03ff..1fd8956fbf01 100644 --- a/packages/db-dtypes/samples/snippets/noxfile.py +++ b/packages/db-dtypes/samples/snippets/noxfile.py @@ -98,6 +98,10 @@ def get_pytest_env_vars() -> Dict[str, str]: "True", "true", ) + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + # # Style Checks # From a6cd77acdc008e5777198e9beda249bbb350c7b3 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 4 Oct 2021 16:37:45 +0200 Subject: [PATCH 013/210] chore(deps): update dependency pytz to v2021.3 (#21) --- packages/db-dtypes/dev_requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/db-dtypes/dev_requirements.txt b/packages/db-dtypes/dev_requirements.txt index 31b4ad771eed..6df8cc152ac5 100644 --- a/packages/db-dtypes/dev_requirements.txt +++ b/packages/db-dtypes/dev_requirements.txt @@ -4,4 +4,4 @@ future==0.18.2 pytest==6.2.5 pytest-flake8==1.0.7 -pytz==2021.1 +pytz==2021.3 From 2bf53846a93cf444ec0b79abad2d3857c29a47ec Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 4 Oct 2021 11:44:27 -0400 Subject: [PATCH 014/210] chore: add default_version and codeowner_team to .repo-metadata.json (#22) * chore: add default_version and codeowner_team to .repo-metadata.json * update codeowner_team --- packages/db-dtypes/.repo-metadata.json | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/packages/db-dtypes/.repo-metadata.json b/packages/db-dtypes/.repo-metadata.json index 7b37150c6c5e..0f8ca37ea22a 100644 --- a/packages/db-dtypes/.repo-metadata.json +++ b/packages/db-dtypes/.repo-metadata.json @@ 
-1,12 +1,13 @@ { - "name": "db-dtypes", - "name_pretty": "Pandas Data Types for SQL systems (BigQuery, Spanner)", - "client_documentation": - "https://googleapis.dev/python/db-dtypes/latest/index.html", - "release_level": "beta", - "language": "python", - "library_type": "INTEGRATION", - "repo": "googleapis/python-db-dtypes-pandas", - "distribution_name": "db-dtypes", - "api_id": "bigquery.googleapis.com" + "name": "db-dtypes", + "name_pretty": "Pandas Data Types for SQL systems (BigQuery, Spanner)", + "client_documentation": "https://googleapis.dev/python/db-dtypes/latest/index.html", + "release_level": "beta", + "language": "python", + "library_type": "INTEGRATION", + "repo": "googleapis/python-db-dtypes-pandas", + "distribution_name": "db-dtypes", + "api_id": "bigquery.googleapis.com", + "default_version": "", + "codeowner_team": "@googleapis/api-bigquery" } From c4f2b5f8c67ff21e16482c4d76034711c55a19fe Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 4 Oct 2021 13:37:37 -0400 Subject: [PATCH 015/210] chore: drop 'dev-requirements.txt' (#23) It is not used by any CI or other script. 
Motivated by: https://github.com/googleapis/python-db-dtypes-pandas/pull/21#issuecomment-933659497 --- packages/db-dtypes/dev_requirements.txt | 7 ------- 1 file changed, 7 deletions(-) delete mode 100644 packages/db-dtypes/dev_requirements.txt diff --git a/packages/db-dtypes/dev_requirements.txt b/packages/db-dtypes/dev_requirements.txt deleted file mode 100644 index 6df8cc152ac5..000000000000 --- a/packages/db-dtypes/dev_requirements.txt +++ /dev/null @@ -1,7 +0,0 @@ -sqlalchemy>=1.1.9 -google-cloud-bigquery>=1.6.0 -future==0.18.2 - -pytest==6.2.5 -pytest-flake8==1.0.7 -pytz==2021.3 From c2fbaad4db3e38f9d27f9d6469985f1027eb407e Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Mon, 4 Oct 2021 15:58:52 -0500 Subject: [PATCH 016/210] fix: avoid rounding problems with microseconds (#20) * fix: avoid rounding problems with microseconds * add more tests * adjust regex to parse fraction as integer --- packages/db-dtypes/db_dtypes/__init__.py | 26 +++--- packages/db-dtypes/tests/unit/test_date.py | 62 +++++++++++++++ packages/db-dtypes/tests/unit/test_dtypes.py | 48 ----------- packages/db-dtypes/tests/unit/test_time.py | 84 ++++++++++++++++++++ 4 files changed, 161 insertions(+), 59 deletions(-) create mode 100644 packages/db-dtypes/tests/unit/test_date.py create mode 100644 packages/db-dtypes/tests/unit/test_time.py diff --git a/packages/db-dtypes/db_dtypes/__init__.py b/packages/db-dtypes/db_dtypes/__init__.py index b4a43a3966d5..c2e91a19f194 100644 --- a/packages/db-dtypes/db_dtypes/__init__.py +++ b/packages/db-dtypes/db_dtypes/__init__.py @@ -69,29 +69,33 @@ def _datetime( cls, scalar, match_fn=re.compile( - r"\s*(?P\d+)(?::(?P\d+)(?::(?P\d+(?:[.]\d+)?)?)?)?\s*$" + r"\s*(?P\d+)" + r"(?::(?P\d+)" + r"(?::(?P\d+)" + r"(?:\.(?P\d*))?)?)?\s*$" ).match, ): if isinstance(scalar, datetime.time): return datetime.datetime.combine(cls._epoch, scalar) elif isinstance(scalar, str): # iso string - match = match_fn(scalar) - if not match: + parsed = match_fn(scalar) + if not 
parsed: raise ValueError(f"Bad time string: {repr(scalar)}") - hour = match.group("hour") - minute = match.group("minute") - second = match.group("second") - second, microsecond = divmod(float(second if second else 0), 1) + hours = parsed.group("hours") + minutes = parsed.group("minutes") + seconds = parsed.group("seconds") + fraction = parsed.group("fraction") + microseconds = int(fraction.ljust(6, "0")[:6]) if fraction else 0 return datetime.datetime( 1970, 1, 1, - int(hour), - int(minute if minute else 0), - int(second), - int(microsecond * 1_000_000), + int(hours), + int(minutes) if minutes else 0, + int(seconds) if seconds else 0, + microseconds, ) else: raise TypeError("Invalid value type", scalar) diff --git a/packages/db-dtypes/tests/unit/test_date.py b/packages/db-dtypes/tests/unit/test_date.py new file mode 100644 index 000000000000..71e704a17570 --- /dev/null +++ b/packages/db-dtypes/tests/unit/test_date.py @@ -0,0 +1,62 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import datetime + +import pandas +import pytest + +# To register the types. +import db_dtypes # noqa + + +@pytest.mark.parametrize( + "value, expected", + [ + # Min/Max values for pandas.Timestamp. + ("1677-09-22", datetime.date(1677, 9, 22)), + ("2262-04-11", datetime.date(2262, 4, 11)), + # Typical "zero" values. + ("1900-01-01", datetime.date(1900, 1, 1)), + ("1970-01-01", datetime.date(1970, 1, 1)), + # Assorted values. 
+ ("1993-10-31", datetime.date(1993, 10, 31)), + ("2012-02-29", datetime.date(2012, 2, 29)), + ("2021-12-17", datetime.date(2021, 12, 17)), + ("2038-01-19", datetime.date(2038, 1, 19)), + ], +) +def test_date_parsing(value, expected): + assert pandas.Series([value], dtype="date")[0] == expected + + +@pytest.mark.parametrize( + "value, error", + [ + ("thursday", "Bad date string: 'thursday'"), + ("1-2-thursday", "Bad date string: '1-2-thursday'"), + ("1-2-3-4", "Bad date string: '1-2-3-4'"), + ("1-2-3.f", "Bad date string: '1-2-3.f'"), + ("1-d-3", "Bad date string: '1-d-3'"), + ("1-3", "Bad date string: '1-3'"), + ("1", "Bad date string: '1'"), + ("", "Bad date string: ''"), + ("2021-2-99", "day is out of range for month"), + ("2021-99-1", "month must be in 1[.][.]12"), + ("10000-1-1", "year 10000 is out of range"), + ], +) +def test_date_parsing_errors(value, error): + with pytest.raises(ValueError, match=error): + pandas.Series([value], dtype="date") diff --git a/packages/db-dtypes/tests/unit/test_dtypes.py b/packages/db-dtypes/tests/unit/test_dtypes.py index 118458ea695c..a514c475b4aa 100644 --- a/packages/db-dtypes/tests/unit/test_dtypes.py +++ b/packages/db-dtypes/tests/unit/test_dtypes.py @@ -621,51 +621,3 @@ def test_date_sub(): do = pd.Series([pd.DateOffset(days=i) for i in range(4)]) expect = dates.astype("object") - do assert np.array_equal(dates - do, expect) - - -@pytest.mark.parametrize( - "value, expected", [("1", datetime.time(1)), ("1:2", datetime.time(1, 2))], -) -def test_short_time_parsing(value, expected): - assert _cls("time")([value])[0] == expected - - -@pytest.mark.parametrize( - "value, error", - [ - ("thursday", "Bad time string: 'thursday'"), - ("1:2:3thursday", "Bad time string: '1:2:3thursday'"), - ("1:2:3:4", "Bad time string: '1:2:3:4'"), - ("1:2:3.f", "Bad time string: '1:2:3.f'"), - ("1:d:3", "Bad time string: '1:d:3'"), - ("1:2.3", "Bad time string: '1:2.3'"), - ("", "Bad time string: ''"), - ("1:2:99", "second must be in 
0[.][.]59"), - ("1:99", "minute must be in 0[.][.]59"), - ("99", "hour must be in 0[.][.]23"), - ], -) -def test_bad_time_parsing(value, error): - with pytest.raises(ValueError, match=error): - _cls("time")([value]) - - -@pytest.mark.parametrize( - "value, error", - [ - ("thursday", "Bad date string: 'thursday'"), - ("1-2-thursday", "Bad date string: '1-2-thursday'"), - ("1-2-3-4", "Bad date string: '1-2-3-4'"), - ("1-2-3.f", "Bad date string: '1-2-3.f'"), - ("1-d-3", "Bad date string: '1-d-3'"), - ("1-3", "Bad date string: '1-3'"), - ("1", "Bad date string: '1'"), - ("", "Bad date string: ''"), - ("2021-2-99", "day is out of range for month"), - ("2021-99-1", "month must be in 1[.][.]12"), - ("10000-1-1", "year 10000 is out of range"), - ], -) -def test_bad_date_parsing(value, error): - with pytest.raises(ValueError, match=error): - _cls("date")([value]) diff --git a/packages/db-dtypes/tests/unit/test_time.py b/packages/db-dtypes/tests/unit/test_time.py new file mode 100644 index 000000000000..4a6adc816afb --- /dev/null +++ b/packages/db-dtypes/tests/unit/test_time.py @@ -0,0 +1,84 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import datetime + +import pandas +import pytest + +# To register the types. 
+import db_dtypes # noqa + + +@pytest.mark.parametrize( + "value, expected", + [ + # Midnight + ("0", datetime.time(0)), + ("0:0", datetime.time(0)), + ("0:0:0", datetime.time(0)), + ("0:0:0.", datetime.time(0)), + ("0:0:0.0", datetime.time(0)), + ("0:0:0.000000", datetime.time(0)), + ("00:00:00", datetime.time(0, 0, 0)), + (" 00:00:00 ", datetime.time(0, 0, 0)), + # Short values + ("1", datetime.time(1)), + ("23", datetime.time(23)), + ("1:2", datetime.time(1, 2)), + ("23:59", datetime.time(23, 59)), + ("1:2:3", datetime.time(1, 2, 3)), + ("23:59:59", datetime.time(23, 59, 59)), + # Non-octal values. + ("08:08:08", datetime.time(8, 8, 8)), + ("09:09:09", datetime.time(9, 9, 9)), + # Fractional seconds can cause rounding problems if cast to float. See: + # https://github.com/googleapis/python-db-dtypes-pandas/issues/18 + ("0:0:59.876543", datetime.time(0, 0, 59, 876543)), + ("01:01:01.010101", datetime.time(1, 1, 1, 10101)), + ("09:09:09.090909", datetime.time(9, 9, 9, 90909)), + ("11:11:11.111111", datetime.time(11, 11, 11, 111111)), + ("19:16:23.987654", datetime.time(19, 16, 23, 987654)), + # Microsecond precision + ("00:00:00.000001", datetime.time(0, 0, 0, 1)), + ("23:59:59.999999", datetime.time(23, 59, 59, 999_999)), + # TODO: Support nanosecond precision values without truncation. 
+ # https://github.com/googleapis/python-db-dtypes-pandas/issues/19 + ("0:0:0.000001001", datetime.time(0, 0, 0, 1)), + ("23:59:59.999999000", datetime.time(23, 59, 59, 999_999)), + ("23:59:59.999999999", datetime.time(23, 59, 59, 999_999)), + ], +) +def test_time_parsing(value, expected): + assert pandas.Series([value], dtype="time")[0] == expected + + +@pytest.mark.parametrize( + "value, error", + [ + ("thursday", "Bad time string: 'thursday'"), + ("1:2:3thursday", "Bad time string: '1:2:3thursday'"), + ("1:2:3:4", "Bad time string: '1:2:3:4'"), + ("1:2:3.f", "Bad time string: '1:2:3.f'"), + ("1:d:3", "Bad time string: '1:d:3'"), + ("1:2.3", "Bad time string: '1:2.3'"), + ("", "Bad time string: ''"), + ("1:2:99", "second must be in 0[.][.]59"), + ("1:99", "minute must be in 0[.][.]59"), + ("99", "hour must be in 0[.][.]23"), + ], +) +def test_time_parsing_errors(value, error): + with pytest.raises(ValueError, match=error): + pandas.Series([value], dtype="time") From f5d6306d6b068d4c1c25c5b1b7d8f99db75af535 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 4 Oct 2021 21:06:10 +0000 Subject: [PATCH 017/210] chore: release 0.1.1 (#24) :robot: I have created a release \*beep\* \*boop\* --- ### [0.1.1](https://www.github.com/googleapis/python-db-dtypes-pandas/compare/v0.1.0...v0.1.1) (2021-10-04) ### Bug Fixes * avoid rounding problems with microseconds ([#20](https://www.github.com/googleapis/python-db-dtypes-pandas/issues/20)) ([0ff7371](https://www.github.com/googleapis/python-db-dtypes-pandas/commit/0ff737120344602f49889596b1efa69a6a18a057)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). 
--- packages/db-dtypes/CHANGELOG.md | 7 +++++++ packages/db-dtypes/db_dtypes/version.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/db-dtypes/CHANGELOG.md b/packages/db-dtypes/CHANGELOG.md index e8b14ab7076e..c0d8b28ceaaf 100644 --- a/packages/db-dtypes/CHANGELOG.md +++ b/packages/db-dtypes/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.1.1](https://www.github.com/googleapis/python-db-dtypes-pandas/compare/v0.1.0...v0.1.1) (2021-10-04) + + +### Bug Fixes + +* avoid rounding problems with microseconds ([#20](https://www.github.com/googleapis/python-db-dtypes-pandas/issues/20)) ([0ff7371](https://www.github.com/googleapis/python-db-dtypes-pandas/commit/0ff737120344602f49889596b1efa69a6a18a057)) + ## 0.1.0 (2021-09-29) diff --git a/packages/db-dtypes/db_dtypes/version.py b/packages/db-dtypes/db_dtypes/version.py index a906ae65b9b2..9cbc9e817f0f 100644 --- a/packages/db-dtypes/db_dtypes/version.py +++ b/packages/db-dtypes/db_dtypes/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "0.1.0" +__version__ = "0.1.1" From 2ac34bc65bfa2e369a3252b0f47ae5906d11bdb7 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 5 Oct 2021 11:17:25 -0600 Subject: [PATCH 018/210] build: use trampoline_v2 for python samples and allow custom dockerfile (#25) Source-Link: https://github.com/googleapis/synthtool/commit/a7ed11ec0863c422ba2e73aafa75eab22c32b33d Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:6e7328583be8edd3ba8f35311c76a1ecbc823010279ccb6ab46b7a76e25eafcc Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 2 +- .../db-dtypes/.kokoro/samples/lint/common.cfg | 2 +- .../.kokoro/samples/python3.6/common.cfg | 2 +- .../.kokoro/samples/python3.6/periodic.cfg | 2 +- .../.kokoro/samples/python3.7/common.cfg | 2 +- .../.kokoro/samples/python3.7/periodic.cfg | 2 +- .../.kokoro/samples/python3.8/common.cfg | 2 +- .../.kokoro/samples/python3.8/periodic.cfg | 2 +- .../.kokoro/samples/python3.9/common.cfg | 2 +- .../.kokoro/samples/python3.9/periodic.cfg | 2 +- .../.kokoro/test-samples-against-head.sh | 2 -- packages/db-dtypes/.kokoro/test-samples.sh | 2 -- packages/db-dtypes/.trampolinerc | 17 ++++++++++++++--- 13 files changed, 24 insertions(+), 17 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index ae6c57fad807..ee94722ab57b 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:82b12321da4446a73cb11bcb6812fbec8c105abda3946d46e6394e5fbfb64c0f + digest: sha256:6e7328583be8edd3ba8f35311c76a1ecbc823010279ccb6ab46b7a76e25eafcc diff --git a/packages/db-dtypes/.kokoro/samples/lint/common.cfg b/packages/db-dtypes/.kokoro/samples/lint/common.cfg index 51b06c3ae843..ff39ec39f579 100644 --- 
a/packages/db-dtypes/.kokoro/samples/lint/common.cfg +++ b/packages/db-dtypes/.kokoro/samples/lint/common.cfg @@ -31,4 +31,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-db-dtypes-pandas/.kokoro/trampoline.sh" \ No newline at end of file +build_file: "python-db-dtypes-pandas/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/python3.6/common.cfg b/packages/db-dtypes/.kokoro/samples/python3.6/common.cfg index f5d0d5e11448..84c31ca6e7ef 100644 --- a/packages/db-dtypes/.kokoro/samples/python3.6/common.cfg +++ b/packages/db-dtypes/.kokoro/samples/python3.6/common.cfg @@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. 
-build_file: "python-db-dtypes-pandas/.kokoro/trampoline.sh" \ No newline at end of file +build_file: "python-db-dtypes-pandas/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/python3.6/periodic.cfg b/packages/db-dtypes/.kokoro/samples/python3.6/periodic.cfg index 50fec9649732..71cd1e597e38 100644 --- a/packages/db-dtypes/.kokoro/samples/python3.6/periodic.cfg +++ b/packages/db-dtypes/.kokoro/samples/python3.6/periodic.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "False" -} \ No newline at end of file +} diff --git a/packages/db-dtypes/.kokoro/samples/python3.7/common.cfg b/packages/db-dtypes/.kokoro/samples/python3.7/common.cfg index 86401fadc1bb..cf54accebdf6 100644 --- a/packages/db-dtypes/.kokoro/samples/python3.7/common.cfg +++ b/packages/db-dtypes/.kokoro/samples/python3.7/common.cfg @@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. 
-build_file: "python-db-dtypes-pandas/.kokoro/trampoline.sh" \ No newline at end of file +build_file: "python-db-dtypes-pandas/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/python3.7/periodic.cfg b/packages/db-dtypes/.kokoro/samples/python3.7/periodic.cfg index 50fec9649732..71cd1e597e38 100644 --- a/packages/db-dtypes/.kokoro/samples/python3.7/periodic.cfg +++ b/packages/db-dtypes/.kokoro/samples/python3.7/periodic.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "False" -} \ No newline at end of file +} diff --git a/packages/db-dtypes/.kokoro/samples/python3.8/common.cfg b/packages/db-dtypes/.kokoro/samples/python3.8/common.cfg index e461be16d4f9..a8500a8a6f2d 100644 --- a/packages/db-dtypes/.kokoro/samples/python3.8/common.cfg +++ b/packages/db-dtypes/.kokoro/samples/python3.8/common.cfg @@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. 
-build_file: "python-db-dtypes-pandas/.kokoro/trampoline.sh" \ No newline at end of file +build_file: "python-db-dtypes-pandas/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/python3.8/periodic.cfg b/packages/db-dtypes/.kokoro/samples/python3.8/periodic.cfg index 50fec9649732..71cd1e597e38 100644 --- a/packages/db-dtypes/.kokoro/samples/python3.8/periodic.cfg +++ b/packages/db-dtypes/.kokoro/samples/python3.8/periodic.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "False" -} \ No newline at end of file +} diff --git a/packages/db-dtypes/.kokoro/samples/python3.9/common.cfg b/packages/db-dtypes/.kokoro/samples/python3.9/common.cfg index 7d3c9417364e..13262b7633e0 100644 --- a/packages/db-dtypes/.kokoro/samples/python3.9/common.cfg +++ b/packages/db-dtypes/.kokoro/samples/python3.9/common.cfg @@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. 
-build_file: "python-db-dtypes-pandas/.kokoro/trampoline.sh" \ No newline at end of file +build_file: "python-db-dtypes-pandas/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/python3.9/periodic.cfg b/packages/db-dtypes/.kokoro/samples/python3.9/periodic.cfg index 50fec9649732..71cd1e597e38 100644 --- a/packages/db-dtypes/.kokoro/samples/python3.9/periodic.cfg +++ b/packages/db-dtypes/.kokoro/samples/python3.9/periodic.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "False" -} \ No newline at end of file +} diff --git a/packages/db-dtypes/.kokoro/test-samples-against-head.sh b/packages/db-dtypes/.kokoro/test-samples-against-head.sh index 9380cfbbc1cf..ba3a707b040c 100755 --- a/packages/db-dtypes/.kokoro/test-samples-against-head.sh +++ b/packages/db-dtypes/.kokoro/test-samples-against-head.sh @@ -23,6 +23,4 @@ set -eo pipefail # Enables `**` to include files nested inside sub-folders shopt -s globstar -cd github/python-db-dtypes-pandas - exec .kokoro/test-samples-impl.sh diff --git a/packages/db-dtypes/.kokoro/test-samples.sh b/packages/db-dtypes/.kokoro/test-samples.sh index 347c6146e067..11c042d342d7 100755 --- a/packages/db-dtypes/.kokoro/test-samples.sh +++ b/packages/db-dtypes/.kokoro/test-samples.sh @@ -24,8 +24,6 @@ set -eo pipefail # Enables `**` to include files nested inside sub-folders shopt -s globstar -cd github/python-db-dtypes-pandas - # Run periodic samples tests at latest release if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then # preserving the test runner implementation. diff --git a/packages/db-dtypes/.trampolinerc b/packages/db-dtypes/.trampolinerc index 383b6ec89fbc..0eee72ab62aa 100644 --- a/packages/db-dtypes/.trampolinerc +++ b/packages/db-dtypes/.trampolinerc @@ -16,15 +16,26 @@ # Add required env vars here. required_envvars+=( - "STAGING_BUCKET" - "V2_STAGING_BUCKET" ) # Add env vars which are passed down into the container here. 
pass_down_envvars+=( + "NOX_SESSION" + ############### + # Docs builds + ############### "STAGING_BUCKET" "V2_STAGING_BUCKET" - "NOX_SESSION" + ################## + # Samples builds + ################## + "INSTALL_LIBRARY_FROM_SOURCE" + "RUN_TESTS_SESSION" + "BUILD_SPECIFIC_GCLOUD_PROJECT" + # Target directories. + "RUN_TESTS_DIRS" + # The nox session to run. + "RUN_TESTS_SESSION" ) # Prevent unintentional override on the default image. From ad336fd803e68747123c3ac859319e1b77400634 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 9 Oct 2021 16:40:29 +0000 Subject: [PATCH 019/210] chore(python): Add kokoro configs for python 3.10 samples testing (#26) --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 2 +- .../.kokoro/samples/python3.10/common.cfg | 40 +++++++++++++++++++ .../.kokoro/samples/python3.10/continuous.cfg | 6 +++ .../samples/python3.10/periodic-head.cfg | 11 +++++ .../.kokoro/samples/python3.10/periodic.cfg | 6 +++ .../.kokoro/samples/python3.10/presubmit.cfg | 6 +++ .../db-dtypes/samples/snippets/noxfile.py | 2 +- 7 files changed, 71 insertions(+), 2 deletions(-) create mode 100644 packages/db-dtypes/.kokoro/samples/python3.10/common.cfg create mode 100644 packages/db-dtypes/.kokoro/samples/python3.10/continuous.cfg create mode 100644 packages/db-dtypes/.kokoro/samples/python3.10/periodic-head.cfg create mode 100644 packages/db-dtypes/.kokoro/samples/python3.10/periodic.cfg create mode 100644 packages/db-dtypes/.kokoro/samples/python3.10/presubmit.cfg diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index ee94722ab57b..7d98291cc35f 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:6e7328583be8edd3ba8f35311c76a1ecbc823010279ccb6ab46b7a76e25eafcc + digest: 
sha256:58f73ba196b5414782605236dd0712a73541b44ff2ff4d3a36ec41092dd6fa5b diff --git a/packages/db-dtypes/.kokoro/samples/python3.10/common.cfg b/packages/db-dtypes/.kokoro/samples/python3.10/common.cfg new file mode 100644 index 000000000000..bcf97b918aac --- /dev/null +++ b/packages/db-dtypes/.kokoro/samples/python3.10/common.cfg @@ -0,0 +1,40 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.10" +} + +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-310" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-db-dtypes-pandas/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "python-db-dtypes-pandas/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/python3.10/continuous.cfg b/packages/db-dtypes/.kokoro/samples/python3.10/continuous.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/db-dtypes/.kokoro/samples/python3.10/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/python3.10/periodic-head.cfg b/packages/db-dtypes/.kokoro/samples/python3.10/periodic-head.cfg new file mode 100644 index 000000000000..ee3d56408db9 --- /dev/null +++ b/packages/db-dtypes/.kokoro/samples/python3.10/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-db-dtypes-pandas/.kokoro/test-samples-against-head.sh" +} diff --git a/packages/db-dtypes/.kokoro/samples/python3.10/periodic.cfg b/packages/db-dtypes/.kokoro/samples/python3.10/periodic.cfg new file mode 100644 index 000000000000..71cd1e597e38 --- /dev/null +++ b/packages/db-dtypes/.kokoro/samples/python3.10/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} diff --git a/packages/db-dtypes/.kokoro/samples/python3.10/presubmit.cfg b/packages/db-dtypes/.kokoro/samples/python3.10/presubmit.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/db-dtypes/.kokoro/samples/python3.10/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/db-dtypes/samples/snippets/noxfile.py 
b/packages/db-dtypes/samples/snippets/noxfile.py index 1fd8956fbf01..93a9122cc457 100644 --- a/packages/db-dtypes/samples/snippets/noxfile.py +++ b/packages/db-dtypes/samples/snippets/noxfile.py @@ -87,7 +87,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] +ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] From c336c535d70d040c7c8f963d7331cf61e973f8b2 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Thu, 14 Oct 2021 12:04:45 -0500 Subject: [PATCH 020/210] feat: rename dbtime and dbdate dtypes to avoid future conflicts with pandas (#32) --- packages/db-dtypes/db_dtypes/__init__.py | 4 +- .../samples/snippets/pandas_date_and_time.py | 12 ++---- .../snippets/pandas_date_and_time_test.py | 4 +- packages/db-dtypes/tests/unit/test_arrow.py | 43 +++++++++++-------- packages/db-dtypes/tests/unit/test_date.py | 4 +- packages/db-dtypes/tests/unit/test_dtypes.py | 34 +++++++-------- packages/db-dtypes/tests/unit/test_time.py | 4 +- 7 files changed, 54 insertions(+), 51 deletions(-) diff --git a/packages/db-dtypes/db_dtypes/__init__.py b/packages/db-dtypes/db_dtypes/__init__.py index c2e91a19f194..bce2bf02e791 100644 --- a/packages/db-dtypes/db_dtypes/__init__.py +++ b/packages/db-dtypes/db_dtypes/__init__.py @@ -34,8 +34,8 @@ from db_dtypes import core -date_dtype_name = "date" -time_dtype_name = "time" +date_dtype_name = "dbdate" +time_dtype_name = "dbtime" pandas_release = packaging.version.parse(pandas.__version__).release diff --git a/packages/db-dtypes/samples/snippets/pandas_date_and_time.py b/packages/db-dtypes/samples/snippets/pandas_date_and_time.py index d6b402998333..bcb654d2314b 100644 --- a/packages/db-dtypes/samples/snippets/pandas_date_and_time.py +++ b/packages/db-dtypes/samples/snippets/pandas_date_and_time.py @@ -14,16 +14,13 @@ def 
pandas_date_and_time(): - # fmt: off # [START bigquery_date_create] import datetime import pandas as pd import db_dtypes # noqa import to register dtypes - dates = pd.Series( - [datetime.date(2021, 9, 17), '2021-9-18'], - dtype='date') + dates = pd.Series([datetime.date(2021, 9, 17), "2021-9-18"], dtype="dbdate") # [END bigquery_date_create] # [START bigquery_date_as_datetime] @@ -33,7 +30,7 @@ def pandas_date_and_time(): # [END bigquery_date_as_datetime] # [START bigquery_date_sub] - dates2 = pd.Series(['2021-1-1', '2021-1-2'], dtype='date') + dates2 = pd.Series(["2021-1-1", "2021-1-2"], dtype="dbdate") diffs = dates - dates2 # [END bigquery_date_sub] @@ -46,9 +43,7 @@ def pandas_date_and_time(): # [END bigquery_date_do] # [START bigquery_time_create] - times = pd.Series( - [datetime.time(1, 2, 3, 456789), '12:00:00.6'], - dtype='time') + times = pd.Series([datetime.time(1, 2, 3, 456789), "12:00:00.6"], dtype="dbtime") # [END bigquery_time_create] # [START bigquery_time_as_timedelta] @@ -67,7 +62,6 @@ def pandas_date_and_time(): combined = dates + times # [END bigquery_combine2_date_time] - # fmt: on return ( dates, diff --git a/packages/db-dtypes/samples/snippets/pandas_date_and_time_test.py b/packages/db-dtypes/samples/snippets/pandas_date_and_time_test.py index b6735c62ebab..6f78240e41c8 100644 --- a/packages/db-dtypes/samples/snippets/pandas_date_and_time_test.py +++ b/packages/db-dtypes/samples/snippets/pandas_date_and_time_test.py @@ -35,7 +35,7 @@ def test_pandas_date_and_time(): combined0, ) = pandas_date_and_time() - assert str(dates.dtype) == "date" + assert str(dates.dtype) == "dbdate" assert list(dates) == [datetime.date(2021, 9, 17), datetime.date(2021, 9, 18)] assert np.array_equal( @@ -45,7 +45,7 @@ def test_pandas_date_and_time(): assert np.array_equal(after, dates.astype("object") + do) assert np.array_equal(before, dates.astype("object") - do) - assert str(times.dtype) == "time" + assert str(times.dtype) == "dbtime" assert list(times) == [ 
datetime.time(1, 2, 3, 456789), datetime.time(12, 0, 0, 600000), diff --git a/packages/db-dtypes/tests/unit/test_arrow.py b/packages/db-dtypes/tests/unit/test_arrow.py index dd0aed78791f..d3745ea777ed 100644 --- a/packages/db-dtypes/tests/unit/test_arrow.py +++ b/packages/db-dtypes/tests/unit/test_arrow.py @@ -25,14 +25,14 @@ @pytest.mark.parametrize( ("series", "expected"), ( - (pandas.Series([], dtype="date"), pyarrow.array([], type=pyarrow.date32())), + (pandas.Series([], dtype="dbdate"), pyarrow.array([], type=pyarrow.date32())), ( - pandas.Series([None, None, None], dtype="date"), + pandas.Series([None, None, None], dtype="dbdate"), pyarrow.array([None, None, None], type=pyarrow.date32()), ), ( pandas.Series( - [dt.date(2021, 9, 27), None, dt.date(2011, 9, 27)], dtype="date" + [dt.date(2021, 9, 27), None, dt.date(2011, 9, 27)], dtype="dbdate" ), pyarrow.array( [dt.date(2021, 9, 27), None, dt.date(2011, 9, 27)], @@ -42,21 +42,25 @@ ( pandas.Series( [dt.date(1677, 9, 22), dt.date(1970, 1, 1), dt.date(2262, 4, 11)], - dtype="date", + dtype="dbdate", ), pyarrow.array( [dt.date(1677, 9, 22), dt.date(1970, 1, 1), dt.date(2262, 4, 11)], type=pyarrow.date32(), ), ), - (pandas.Series([], dtype="time"), pyarrow.array([], type=pyarrow.time64("ns"))), ( - pandas.Series([None, None, None], dtype="time"), + pandas.Series([], dtype="dbtime"), + pyarrow.array([], type=pyarrow.time64("ns")), + ), + ( + pandas.Series([None, None, None], dtype="dbtime"), pyarrow.array([None, None, None], type=pyarrow.time64("ns")), ), ( pandas.Series( - [dt.time(0, 0, 0, 0), None, dt.time(23, 59, 59, 999_999)], dtype="time" + [dt.time(0, 0, 0, 0), None, dt.time(23, 59, 59, 999_999)], + dtype="dbtime", ), pyarrow.array( [dt.time(0, 0, 0, 0), None, dt.time(23, 59, 59, 999_999)], @@ -70,7 +74,7 @@ dt.time(12, 30, 15, 125_000), dt.time(23, 59, 59, 999_999), ], - dtype="time", + dtype="dbtime", ), pyarrow.array( [ @@ -91,14 +95,14 @@ def test_to_arrow(series, expected): @pytest.mark.parametrize( 
("series", "expected"), ( - (pandas.Series([], dtype="date"), pyarrow.array([], type=pyarrow.date64())), + (pandas.Series([], dtype="dbdate"), pyarrow.array([], type=pyarrow.date64())), ( - pandas.Series([None, None, None], dtype="date"), + pandas.Series([None, None, None], dtype="dbdate"), pyarrow.array([None, None, None], type=pyarrow.date64()), ), ( pandas.Series( - [dt.date(2021, 9, 27), None, dt.date(2011, 9, 27)], dtype="date" + [dt.date(2021, 9, 27), None, dt.date(2011, 9, 27)], dtype="dbdate" ), pyarrow.array( [dt.date(2021, 9, 27), None, dt.date(2011, 9, 27)], @@ -108,21 +112,25 @@ def test_to_arrow(series, expected): ( pandas.Series( [dt.date(1677, 9, 22), dt.date(1970, 1, 1), dt.date(2262, 4, 11)], - dtype="date", + dtype="dbdate", ), pyarrow.array( [dt.date(1677, 9, 22), dt.date(1970, 1, 1), dt.date(2262, 4, 11)], type=pyarrow.date64(), ), ), - (pandas.Series([], dtype="time"), pyarrow.array([], type=pyarrow.time32("ms"))), ( - pandas.Series([None, None, None], dtype="time"), + pandas.Series([], dtype="dbtime"), + pyarrow.array([], type=pyarrow.time32("ms")), + ), + ( + pandas.Series([None, None, None], dtype="dbtime"), pyarrow.array([None, None, None], type=pyarrow.time32("ms")), ), ( pandas.Series( - [dt.time(0, 0, 0, 0), None, dt.time(23, 59, 59, 999_000)], dtype="time" + [dt.time(0, 0, 0, 0), None, dt.time(23, 59, 59, 999_000)], + dtype="dbtime", ), pyarrow.array( [dt.time(0, 0, 0, 0), None, dt.time(23, 59, 59, 999_000)], @@ -131,7 +139,8 @@ def test_to_arrow(series, expected): ), ( pandas.Series( - [dt.time(0, 0, 0, 0), None, dt.time(23, 59, 59, 999_999)], dtype="time" + [dt.time(0, 0, 0, 0), None, dt.time(23, 59, 59, 999_999)], + dtype="dbtime", ), pyarrow.array( [dt.time(0, 0, 0, 0), None, dt.time(23, 59, 59, 999_999)], @@ -145,7 +154,7 @@ def test_to_arrow(series, expected): dt.time(12, 30, 15, 125_000), dt.time(23, 59, 59, 999_999), ], - dtype="time", + dtype="dbtime", ), pyarrow.array( [ diff --git a/packages/db-dtypes/tests/unit/test_date.py 
b/packages/db-dtypes/tests/unit/test_date.py index 71e704a17570..c919f6d7250f 100644 --- a/packages/db-dtypes/tests/unit/test_date.py +++ b/packages/db-dtypes/tests/unit/test_date.py @@ -38,7 +38,7 @@ ], ) def test_date_parsing(value, expected): - assert pandas.Series([value], dtype="date")[0] == expected + assert pandas.Series([value], dtype="dbdate")[0] == expected @pytest.mark.parametrize( @@ -59,4 +59,4 @@ def test_date_parsing(value, expected): ) def test_date_parsing_errors(value, error): with pytest.raises(ValueError, match=error): - pandas.Series([value], dtype="date") + pandas.Series([value], dtype="dbdate") diff --git a/packages/db-dtypes/tests/unit/test_dtypes.py b/packages/db-dtypes/tests/unit/test_dtypes.py index a514c475b4aa..aacbf0b464c9 100644 --- a/packages/db-dtypes/tests/unit/test_dtypes.py +++ b/packages/db-dtypes/tests/unit/test_dtypes.py @@ -23,17 +23,17 @@ pandas_release = packaging.version.parse(pd.__version__).release SAMPLE_RAW_VALUES = dict( - date=(datetime.date(2021, 2, 2), "2021-2-3", None), - time=(datetime.time(1, 2, 2), "1:2:3.5", None), + dbdate=(datetime.date(2021, 2, 2), "2021-2-3", None), + dbtime=(datetime.time(1, 2, 2), "1:2:3.5", None), ) SAMPLE_VALUES = dict( - date=( + dbdate=( datetime.date(2021, 2, 2), datetime.date(2021, 2, 3), datetime.date(2021, 2, 4), datetime.date(2021, 2, 5), ), - time=( + dbtime=( datetime.time(1, 2, 2), datetime.time(1, 2, 3, 500000), datetime.time(1, 2, 4, 500000), @@ -41,13 +41,13 @@ ), ) SAMPLE_DT_VALUES = dict( - date=( + dbdate=( "2021-02-02T00:00:00.000000", "2021-02-03T00:00:00.000000", "2021-02-04T00:00:00.000000", "2021-02-05T00:00:00.000000", ), - time=( + dbtime=( "1970-01-01T01:02:02.000000", "1970-01-01T01:02:03.500000", "1970-01-01T01:02:04.500000", @@ -55,7 +55,7 @@ ), ) -for_date_and_time = pytest.mark.parametrize("dtype", ["date", "time"]) +for_date_and_time = pytest.mark.parametrize("dtype", ["dbdate", "dbtime"]) def eq_na(a1, a2): @@ -72,7 +72,7 @@ def register_dtype(): def 
_cls(dtype): import db_dtypes - return getattr(db_dtypes, dtype.capitalize() + "Array") + return getattr(db_dtypes, dtype[2:].capitalize() + "Array") def _make_one(dtype): @@ -322,7 +322,7 @@ def test_take(dtype, allow_fill, fill_value): if fill_value == 42: fill_value = expected_fill = ( datetime.date(1971, 4, 2) - if dtype == "date" + if dtype == "dbdate" else datetime.time(0, 42, 42, 424242) ) else: @@ -441,7 +441,7 @@ def test_astype_copy(dtype): ], ) def test_asdatetime(dtype, same): - a = _make_one("date") + a = _make_one("dbdate") for dt in dtype, np.dtype(dtype) if dtype != "datetime" else dtype: if same: b = a.astype(dt, copy=False) @@ -480,7 +480,7 @@ def test_astimedelta(dtype): .astype("timedelta64[ns]" if dtype == "timedelta" else dtype) ) - a = _cls("time")([t, None]) + a = _cls("dbtime")([t, None]) b = a.astype(dtype) np.array_equal(b[:1], expect) assert pd.isna(b[1]) and str(b[1]) == "NaT" @@ -526,7 +526,7 @@ def test_min_max_median(dtype): if pandas_release >= (1, 2): assert ( a.median() == datetime.time(1, 2, 4) - if dtype == "time" + if dtype == "dbtime" else datetime.date(2021, 2, 3) ) @@ -553,14 +553,14 @@ def test_min_max_median(dtype): if pandas_release >= (1, 2): assert ( a.median() == datetime.time(1, 2, 2, 750000) - if dtype == "time" + if dtype == "dbtime" else datetime.date(2021, 2, 2) ) def test_date_add(): - dates = _cls("date")(SAMPLE_VALUES["date"]) - times = _cls("time")(SAMPLE_VALUES["time"]) + dates = _cls("dbdate")(SAMPLE_VALUES["dbdate"]) + times = _cls("dbtime")(SAMPLE_VALUES["dbtime"]) expect = dates.astype("datetime64") + times.astype("timedelta64") assert np.array_equal(dates + times, expect) @@ -592,8 +592,8 @@ def test_date_add(): def test_date_sub(): - dates = _cls("date")(SAMPLE_VALUES["date"]) - dates2 = _cls("date")( + dates = _cls("dbdate")(SAMPLE_VALUES["dbdate"]) + dates2 = _cls("dbdate")( ( datetime.date(2021, 1, 2), datetime.date(2021, 1, 3), diff --git a/packages/db-dtypes/tests/unit/test_time.py 
b/packages/db-dtypes/tests/unit/test_time.py index 4a6adc816afb..ba459499a1cb 100644 --- a/packages/db-dtypes/tests/unit/test_time.py +++ b/packages/db-dtypes/tests/unit/test_time.py @@ -61,7 +61,7 @@ ], ) def test_time_parsing(value, expected): - assert pandas.Series([value], dtype="time")[0] == expected + assert pandas.Series([value], dtype="dbtime")[0] == expected @pytest.mark.parametrize( @@ -81,4 +81,4 @@ def test_time_parsing(value, expected): ) def test_time_parsing_errors(value, error): with pytest.raises(ValueError, match=error): - pandas.Series([value], dtype="time") + pandas.Series([value], dtype="dbtime") From 16bf63887977f423181ec07ea1a6d10aa1f8583f Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Thu, 14 Oct 2021 13:54:56 -0500 Subject: [PATCH 021/210] docs: add how-to guide and include API reference (#33) * docs: add how-to guide and include API reference * fix indentation * fix types in sample * fix types in sample --- packages/db-dtypes/README.rst | 15 ---- packages/db-dtypes/docs/conf.py | 2 +- packages/db-dtypes/docs/index.rst | 9 +++ packages/db-dtypes/docs/reference.rst | 2 +- packages/db-dtypes/docs/samples | 1 + packages/db-dtypes/docs/usage.rst | 80 +++++++++++++++++++ packages/db-dtypes/owlbot.py | 4 +- .../samples/snippets/pandas_date_and_time.py | 35 ++++---- 8 files changed, 112 insertions(+), 36 deletions(-) create mode 120000 packages/db-dtypes/docs/samples create mode 100644 packages/db-dtypes/docs/usage.rst diff --git a/packages/db-dtypes/README.rst b/packages/db-dtypes/README.rst index 5ea347cdd126..a14a61db5e76 100644 --- a/packages/db-dtypes/README.rst +++ b/packages/db-dtypes/README.rst @@ -18,21 +18,6 @@ Pandas Data Types for SQL systems (BigQuery, Spanner) .. _Library Documentation: https://googleapis.dev/python/db-dtypes/latest -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. 
[Optional] `Enable billing for your project.`_ -3. `Enable the BigQuery Storage API.`_ -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Enable the BigQuery Storage API.: https://console.cloud.google.com/apis/library/bigquery.googleapis.com -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - Installation ------------ diff --git a/packages/db-dtypes/docs/conf.py b/packages/db-dtypes/docs/conf.py index 9482b9748266..5cf73ba1c1c3 100644 --- a/packages/db-dtypes/docs/conf.py +++ b/packages/db-dtypes/docs/conf.py @@ -351,7 +351,7 @@ "grpc": ("https://grpc.github.io/grpc/python/", None), "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), - "pandas": ("http://pandas.pydata.org/pandas-docs/dev", None), + "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/packages/db-dtypes/docs/index.rst b/packages/db-dtypes/docs/index.rst index f5f0b6f845a9..ca497d59a07d 100644 --- a/packages/db-dtypes/docs/index.rst +++ b/packages/db-dtypes/docs/index.rst @@ -1,5 +1,14 @@ .. include:: README.rst +How-to Guides +------------- + +.. toctree:: + :maxdepth: 2 + + usage + + API Reference ------------- diff --git a/packages/db-dtypes/docs/reference.rst b/packages/db-dtypes/docs/reference.rst index df1541d7d9e5..fc9efb615c53 100644 --- a/packages/db-dtypes/docs/reference.rst +++ b/packages/db-dtypes/docs/reference.rst @@ -1,4 +1,4 @@ API Reference ^^^^^^^^^^^^^ -.. automodule:: db_dtypes.version +.. 
automodule:: db_dtypes diff --git a/packages/db-dtypes/docs/samples b/packages/db-dtypes/docs/samples new file mode 120000 index 000000000000..e804737ed3a9 --- /dev/null +++ b/packages/db-dtypes/docs/samples @@ -0,0 +1 @@ +../samples \ No newline at end of file diff --git a/packages/db-dtypes/docs/usage.rst b/packages/db-dtypes/docs/usage.rst new file mode 100644 index 000000000000..5ecc5620592c --- /dev/null +++ b/packages/db-dtypes/docs/usage.rst @@ -0,0 +1,80 @@ +Using the db-dtypes package +--------------------------- + +Importing the :mod:`db_dtypes` module registers the extension dtypes for use +in pandas. + +Construct a date :class:`~pandas.Series` with strings in ``YYYY-MM-DD`` format +or :class:`datetime.date` objects. + +.. literalinclude:: samples/snippets/pandas_date_and_time.py + :language: python + :dedent: 4 + :start-after: [START bigquery_pandas_date_create] + :end-before: [END bigquery_pandas_date_create] + +Working with dates +^^^^^^^^^^^^^^^^^^ + +Convert a date :class:`~pandas.Series` to a ``datetime64`` Series with +:meth:`~pandas.Series.astype`. The resulting values use midnight as the +time part. + +.. literalinclude:: samples/snippets/pandas_date_and_time.py + :language: python + :dedent: 4 + :start-after: [START bigquery_pandas_date_as_datetime] + :end-before: [END bigquery_pandas_date_as_datetime] + +Just like ``datetime64`` values, date values can be subtracted. This is +equivalent to first converting to ``datetime64`` and then subtracting. + +.. literalinclude:: samples/snippets/pandas_date_and_time.py + :language: python + :dedent: 4 + :start-after: [START bigquery_pandas_date_sub] + :end-before: [END bigquery_pandas_date_sub] + +Just like ``datetime64`` values, :class:`~pandas.tseries.offsets.DateOffset` +values can be added to them. + +.. 
literalinclude:: samples/snippets/pandas_date_and_time.py + :language: python + :dedent: 4 + :start-after: [START bigquery_pandas_date_add_offset] + :end-before: [END bigquery_pandas_date_add_offset] + + +Working with times +^^^^^^^^^^^^^^^^^^ + +Construct a time :class:`~pandas.Series` with strings in ``HH:MM:SS.fraction`` +24-hour format or :class:`datetime.time` objects. + +.. literalinclude:: samples/snippets/pandas_date_and_time.py + :language: python + :dedent: 4 + :start-after: [START bigquery_pandas_time_create] + :end-before: [END bigquery_pandas_time_create] + +Convert a time :class:`~pandas.Series` to a ``timedelta64`` Series with +:meth:`~pandas.Series.astype`. + +.. literalinclude:: samples/snippets/pandas_date_and_time.py + :language: python + :dedent: 4 + :start-after: [START bigquery_pandas_time_as_timedelta] + :end-before: [END bigquery_pandas_time_as_timedelta] + + +Combining dates and times +^^^^^^^^^^^^^^^^^^^^^^^^^ + +Combine a date :class:`~pandas.Series` with a time :class:`~pandas.Series` to +create a ``datetime64`` :class:`~pandas.Series`. + + .. 
literalinclude:: samples/snippets/pandas_date_and_time.py + :language: python + :dedent: 4 + :start-after: [START bigquery_pandas_combine_date_time] + :end-before: [END bigquery_pandas_combine_date_time] diff --git a/packages/db-dtypes/owlbot.py b/packages/db-dtypes/owlbot.py index 176e179e6c91..e6c264c2c132 100644 --- a/packages/db-dtypes/owlbot.py +++ b/packages/db-dtypes/owlbot.py @@ -31,7 +31,9 @@ unit_test_python_versions=["3.6", "3.7", "3.8", "3.9"], system_test_python_versions=["3.8"], cov_level=100, - intersphinx_dependencies={"pandas": "http://pandas.pydata.org/pandas-docs/dev"}, + intersphinx_dependencies={ + "pandas": "https://pandas.pydata.org/pandas-docs/stable/" + }, ) s.move(templated_files, excludes=["docs/multiprocessing.rst"]) diff --git a/packages/db-dtypes/samples/snippets/pandas_date_and_time.py b/packages/db-dtypes/samples/snippets/pandas_date_and_time.py index bcb654d2314b..3292e6cc82a5 100644 --- a/packages/db-dtypes/samples/snippets/pandas_date_and_time.py +++ b/packages/db-dtypes/samples/snippets/pandas_date_and_time.py @@ -14,7 +14,7 @@ def pandas_date_and_time(): - # [START bigquery_date_create] + # [START bigquery_pandas_date_create] import datetime import pandas as pd @@ -22,46 +22,45 @@ def pandas_date_and_time(): dates = pd.Series([datetime.date(2021, 9, 17), "2021-9-18"], dtype="dbdate") - # [END bigquery_date_create] - # [START bigquery_date_as_datetime] + # [END bigquery_pandas_date_create] + # [START bigquery_pandas_date_as_datetime] datetimes = dates.astype("datetime64") - # [END bigquery_date_as_datetime] - # [START bigquery_date_sub] + # [END bigquery_pandas_date_as_datetime] + # [START bigquery_pandas_date_sub] dates2 = pd.Series(["2021-1-1", "2021-1-2"], dtype="dbdate") diffs = dates - dates2 - # [END bigquery_date_sub] - # [START bigquery_date_do] + # [END bigquery_pandas_date_sub] + # [START bigquery_pandas_date_add_offset] do = pd.DateOffset(days=1) after = dates + do before = dates - do - # [END bigquery_date_do] - # 
[START bigquery_time_create] + # [END bigquery_pandas_date_add_offset] + # [START bigquery_pandas_time_create] times = pd.Series([datetime.time(1, 2, 3, 456789), "12:00:00.6"], dtype="dbtime") - # [END bigquery_time_create] - # [START bigquery_time_as_timedelta] + # [END bigquery_pandas_time_create] + # [START bigquery_pandas_time_as_timedelta] timedeltas = times.astype("timedelta64") - # [END bigquery_time_as_timedelta] - # [START bigquery_combine_date_time] + # [END bigquery_pandas_time_as_timedelta] - combined = datetimes + timedeltas + # Combine datetime64 and timedelta64 to confirm adding dates and times are + # equivalent. + combined0 = datetimes + timedeltas - # [END bigquery_combine_date_time] - combined0 = combined - # [START bigquery_combine2_date_time] + # [START bigquery_pandas_combine_date_time] combined = dates + times - # [END bigquery_combine2_date_time] + # [END bigquery_pandas_combine_date_time] return ( dates, From 6d6d28d2475219315456a710c7de92eb0cf4f16d Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 25 Oct 2021 09:15:29 -0500 Subject: [PATCH 022/210] chore: release 0.2.0 (#34) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/db-dtypes/CHANGELOG.md | 12 ++++++++++++ packages/db-dtypes/db_dtypes/version.py | 2 +- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/packages/db-dtypes/CHANGELOG.md b/packages/db-dtypes/CHANGELOG.md index c0d8b28ceaaf..0b78ab38bd5d 100644 --- a/packages/db-dtypes/CHANGELOG.md +++ b/packages/db-dtypes/CHANGELOG.md @@ -1,5 +1,17 @@ # Changelog +## [0.2.0](https://www.github.com/googleapis/python-db-dtypes-pandas/compare/v0.1.1...v0.2.0) (2021-10-14) + + +### Features + +* rename dbtime and dbdate dtypes to avoid future conflicts with pandas ([#32](https://www.github.com/googleapis/python-db-dtypes-pandas/issues/32)) 
([50ea0f7](https://www.github.com/googleapis/python-db-dtypes-pandas/commit/50ea0f798548aa2f0516f6afc93ba6e80cc0e6d9)) + + +### Documentation + +* add how-to guide and include API reference ([#33](https://www.github.com/googleapis/python-db-dtypes-pandas/issues/33)) ([878dce4](https://www.github.com/googleapis/python-db-dtypes-pandas/commit/878dce48bd6714706a2a829775ce00e61724fc7a)) + ### [0.1.1](https://www.github.com/googleapis/python-db-dtypes-pandas/compare/v0.1.0...v0.1.1) (2021-10-04) diff --git a/packages/db-dtypes/db_dtypes/version.py b/packages/db-dtypes/db_dtypes/version.py index 9cbc9e817f0f..4da46cc4f5da 100644 --- a/packages/db-dtypes/db_dtypes/version.py +++ b/packages/db-dtypes/db_dtypes/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "0.1.1" +__version__ = "0.2.0" From 0d875d615b48baafae824b359edcfd358d850143 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 25 Oct 2021 16:31:36 -0400 Subject: [PATCH 023/210] chore(python): push cloud library docs to staging bucket for Cloud RAD (#35) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(python): push cloud library docs to staging bucket for Cloud RAD Source-Link: https://github.com/googleapis/synthtool/commit/7fd61f8efae782a7cfcecc599faf52f9737fe584 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:4ee57a76a176ede9087c14330c625a71553cf9c72828b2c0ca12f5338171ba60 * update replacement in owlbot.py * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 2 +- 
packages/db-dtypes/.kokoro/docs/common.cfg | 4 +++- packages/db-dtypes/owlbot.py | 2 +- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 7d98291cc35f..108063d4dee4 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:58f73ba196b5414782605236dd0712a73541b44ff2ff4d3a36ec41092dd6fa5b + digest: sha256:4ee57a76a176ede9087c14330c625a71553cf9c72828b2c0ca12f5338171ba60 diff --git a/packages/db-dtypes/.kokoro/docs/common.cfg b/packages/db-dtypes/.kokoro/docs/common.cfg index abd7ad73d60d..c790a9abfc0c 100644 --- a/packages/db-dtypes/.kokoro/docs/common.cfg +++ b/packages/db-dtypes/.kokoro/docs/common.cfg @@ -30,7 +30,9 @@ env_vars: { env_vars: { key: "V2_STAGING_BUCKET" - value: "docs-staging-v2" + # Push non-cloud library docs to `docs-staging-v2-staging` instead of the + # Cloud RAD bucket `docs-staging-v2` + value: "docs-staging-v2-staging" } # It will upload the docker image after successful builds. diff --git a/packages/db-dtypes/owlbot.py b/packages/db-dtypes/owlbot.py index e6c264c2c132..eae82e96817c 100644 --- a/packages/db-dtypes/owlbot.py +++ b/packages/db-dtypes/owlbot.py @@ -50,7 +50,7 @@ ) s.replace( - ["noxfile.py"], "google/cloud", "db_dtypes", + ["noxfile.py"], "--cov=google", "--cov=db_dtypes", ) # There are no system tests for this package. 
From 398b20c9d8d661787143992b70c2b631e56b0f6e Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 25 Oct 2021 21:28:47 -0400 Subject: [PATCH 024/210] chore(python): omit google/__init__.py in coverage (#36) Source-Link: https://github.com/googleapis/synthtool/commit/694118b039b09551fb5d445fceb361a7dbb06400 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:ec49167c606648a063d1222220b48119c912562849a0528f35bfb592a9f72737 Co-authored-by: Owl Bot --- packages/db-dtypes/.coveragerc | 1 + packages/db-dtypes/.github/.OwlBot.lock.yaml | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/db-dtypes/.coveragerc b/packages/db-dtypes/.coveragerc index ad02e9bc3154..0f8f9058e323 100644 --- a/packages/db-dtypes/.coveragerc +++ b/packages/db-dtypes/.coveragerc @@ -18,6 +18,7 @@ [run] branch = True omit = + google/__init__.py db_dtypes/requirements.py [report] diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 108063d4dee4..cb89b2e326b7 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:4ee57a76a176ede9087c14330c625a71553cf9c72828b2c0ca12f5338171ba60 + digest: sha256:ec49167c606648a063d1222220b48119c912562849a0528f35bfb592a9f72737 From cab280a622d2e4e842ae9749d7348500518f6f34 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 26 Oct 2021 23:00:17 +0200 Subject: [PATCH 025/210] chore(deps): update dependency pyarrow to v6 (#37) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| 
| [pyarrow](https://arrow.apache.org/) | ` >= 3.0.0, < 6.0dev` -> `>=3.0.0, <6.1` | [![age](https://badges.renovateapi.com/packages/pypi/pyarrow/6.0.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/pyarrow/6.0.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/pyarrow/6.0.0/compatibility-slim/5.0.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/pyarrow/6.0.0/confidence-slim/5.0.0)](https://docs.renovatebot.com/merge-confidence/) | --- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, click this checkbox. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/python-db-dtypes-pandas). 
--- packages/db-dtypes/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/db-dtypes/setup.py b/packages/db-dtypes/setup.py index 8e1e3558882d..19377e51d837 100644 --- a/packages/db-dtypes/setup.py +++ b/packages/db-dtypes/setup.py @@ -31,7 +31,7 @@ dependencies = [ "packaging >= 17.0", "pandas >= 0.24.2, < 2.0dev", - "pyarrow >= 3.0.0, < 6.0dev", + "pyarrow>=3.0.0, <7.0dev", "numpy >= 1.16.6, < 2.0dev", ] From 79373e54f7665f179025ac2dbaa45847bdc9f8e5 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Thu, 4 Nov 2021 11:18:30 -0500 Subject: [PATCH 026/210] feat: support Python 3.10 (#40) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: support Python 3.10 * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- packages/db-dtypes/CONTRIBUTING.rst | 6 ++++-- packages/db-dtypes/noxfile.py | 2 +- packages/db-dtypes/owlbot.py | 2 +- packages/db-dtypes/setup.py | 3 ++- packages/db-dtypes/testing/constraints-3.11.txt | 0 5 files changed, 8 insertions(+), 5 deletions(-) create mode 100644 packages/db-dtypes/testing/constraints-3.11.txt diff --git a/packages/db-dtypes/CONTRIBUTING.rst b/packages/db-dtypes/CONTRIBUTING.rst index f066db35b18b..22f6382c7032 100644 --- a/packages/db-dtypes/CONTRIBUTING.rst +++ b/packages/db-dtypes/CONTRIBUTING.rst @@ -22,7 +22,7 @@ In order to add a feature: documentation. - The feature must work fully on the following CPython versions: - 3.6, 3.7, 3.8 and 3.9 on both UNIX and Windows. + 3.6, 3.7, 3.8, 3.9 and 3.10 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -72,7 +72,7 @@ We use `nox `__ to instrument our tests. 
- To run a single unit test:: - $ nox -s unit-3.9 -- -k + $ nox -s unit-3.10 -- -k .. note:: @@ -225,11 +225,13 @@ We support: - `Python 3.7`_ - `Python 3.8`_ - `Python 3.9`_ +- `Python 3.10`_ .. _Python 3.6: https://docs.python.org/3.6/ .. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ .. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ Supported versions can be found in our ``noxfile.py`` `config`_. diff --git a/packages/db-dtypes/noxfile.py b/packages/db-dtypes/noxfile.py index f2a2bedf0abe..5f48361032c0 100644 --- a/packages/db-dtypes/noxfile.py +++ b/packages/db-dtypes/noxfile.py @@ -29,7 +29,7 @@ DEFAULT_PYTHON_VERSION = "3.8" SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] -UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] +UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() diff --git a/packages/db-dtypes/owlbot.py b/packages/db-dtypes/owlbot.py index eae82e96817c..30f3b3d7cf2e 100644 --- a/packages/db-dtypes/owlbot.py +++ b/packages/db-dtypes/owlbot.py @@ -28,7 +28,7 @@ # Add templated files # ---------------------------------------------------------------------------- templated_files = common.py_library( - unit_test_python_versions=["3.6", "3.7", "3.8", "3.9"], + unit_test_python_versions=["3.6", "3.7", "3.8", "3.9", "3.10"], system_test_python_versions=["3.8"], cov_level=100, intersphinx_dependencies={ diff --git a/packages/db-dtypes/setup.py b/packages/db-dtypes/setup.py index 19377e51d837..8def678a1ee4 100644 --- a/packages/db-dtypes/setup.py +++ b/packages/db-dtypes/setup.py @@ -66,11 +66,12 @@ def readme(): "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", "Operating System :: OS Independent", "Topic :: Database :: Front-Ends", ], platforms="Posix; MacOS X; Windows", 
install_requires=dependencies, - python_requires=">=3.6, <3.10", + python_requires=">=3.6, <3.11", tests_require=["pytest"], ) diff --git a/packages/db-dtypes/testing/constraints-3.11.txt b/packages/db-dtypes/testing/constraints-3.11.txt new file mode 100644 index 000000000000..e69de29bb2d1 From a791042d4526caca55c0c54250576ca62602ddf5 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Mon, 8 Nov 2021 16:48:53 -0600 Subject: [PATCH 027/210] feat: support conversion from pyarrow RecordBatch to pandas DataFrame (#39) * feat: support conversion from pyarrow RecordBatch to pandas DataFrame * hack together working implementation TODO: add tests for constructing pandas Series with pyarrow scalars * fix unit test coverage, optimize arrow to numpy conversion * apply same optimizations to to_arrow conversion * remove redundant to_numpy now that to_arrow doesn't use it * be explicit about chunked array vs array * add docstrings to arrow conversion functions * add test case for round-trip to/from pyarrow nanosecond-precision time scalars * add time32("ms") test case without nulls for completeness --- packages/db-dtypes/db_dtypes/__init__.py | 109 +++++- packages/db-dtypes/db_dtypes/core.py | 8 +- packages/db-dtypes/tests/unit/test_arrow.py | 404 ++++++++++++++------ 3 files changed, 375 insertions(+), 146 deletions(-) diff --git a/packages/db-dtypes/db_dtypes/__init__.py b/packages/db-dtypes/db_dtypes/__init__.py index bce2bf02e791..f1424fb60d67 100644 --- a/packages/db-dtypes/db_dtypes/__init__.py +++ b/packages/db-dtypes/db_dtypes/__init__.py @@ -17,6 +17,7 @@ import datetime import re +from typing import Union import numpy import packaging.version @@ -29,6 +30,7 @@ import pandas.core.dtypes.generic import pandas.core.nanops import pyarrow +import pyarrow.compute from db_dtypes.version import __version__ from db_dtypes import core @@ -36,6 +38,8 @@ date_dtype_name = "dbdate" time_dtype_name = "dbtime" +_EPOCH = datetime.datetime(1970, 1, 1) +_NPEPOCH = numpy.datetime64(_EPOCH) 
pandas_release = packaging.version.parse(pandas.__version__).release @@ -52,6 +56,33 @@ class TimeDtype(core.BaseDatetimeDtype): def construct_array_type(self): return TimeArray + @staticmethod + def __from_arrow__( + array: Union[pyarrow.Array, pyarrow.ChunkedArray] + ) -> "TimeArray": + """Convert to dbtime data from an Arrow array. + + See: + https://pandas.pydata.org/pandas-docs/stable/development/extending.html#compatibility-with-apache-arrow + """ + # We can't call combine_chunks on an empty array, so short-circuit the + # rest of the function logic for this special case. + if len(array) == 0: + return TimeArray(numpy.array([], dtype="datetime64[ns]")) + + # We can't cast to timestamp("ns"), but time64("ns") has the same + # memory layout: 64-bit integers representing the number of nanoseconds + # since the datetime epoch (midnight 1970-01-01). + array = pyarrow.compute.cast(array, pyarrow.time64("ns")) + + # ChunkedArray has no "view" method, so combine into an Array. + if isinstance(array, pyarrow.ChunkedArray): + array = array.combine_chunks() + + array = array.view(pyarrow.timestamp("ns")) + np_array = array.to_numpy(zero_copy_only=False) + return TimeArray(np_array) + class TimeArray(core.BaseDatetimeArray): """ @@ -61,8 +92,6 @@ class TimeArray(core.BaseDatetimeArray): # Data are stored as datetime64 values with a date of Jan 1, 1970 dtype = TimeDtype() - _epoch = datetime.datetime(1970, 1, 1) - _npepoch = numpy.datetime64(_epoch) @classmethod def _datetime( @@ -75,8 +104,21 @@ def _datetime( r"(?:\.(?P\d*))?)?)?\s*$" ).match, ): - if isinstance(scalar, datetime.time): - return datetime.datetime.combine(cls._epoch, scalar) + # Convert pyarrow values to datetime.time. 
+ if isinstance(scalar, (pyarrow.Time32Scalar, pyarrow.Time64Scalar)): + scalar = ( + scalar.cast(pyarrow.time64("ns")) + .cast(pyarrow.int64()) + .cast(pyarrow.timestamp("ns")) + .as_py() + ) + + if scalar is None: + return None + elif isinstance(scalar, datetime.time): + return datetime.datetime.combine(_EPOCH, scalar) + elif isinstance(scalar, pandas.Timestamp): + return scalar.to_datetime64() elif isinstance(scalar, str): # iso string parsed = match_fn(scalar) @@ -113,7 +155,7 @@ def _box_func(self, x): __return_deltas = {"timedelta", "timedelta64", "timedelta64[ns]", " "DateArray": + """Convert to dbdate data from an Arrow array. + + See: + https://pandas.pydata.org/pandas-docs/stable/development/extending.html#compatibility-with-apache-arrow + """ + array = pyarrow.compute.cast(array, pyarrow.timestamp("ns")) + np_array = array.to_numpy() + return DateArray(np_array) + class DateArray(core.BaseDatetimeArray): """ @@ -161,7 +226,13 @@ def _datetime( scalar, match_fn=re.compile(r"\s*(?P\d+)-(?P\d+)-(?P\d+)\s*$").match, ): - if isinstance(scalar, datetime.date): + # Convert pyarrow values to datetime.date. + if isinstance(scalar, (pyarrow.Date32Scalar, pyarrow.Date64Scalar)): + scalar = scalar.as_py() + + if scalar is None: + return None + elif isinstance(scalar, datetime.date): return datetime.datetime(scalar.year, scalar.month, scalar.day) elif isinstance(scalar, str): match = match_fn(scalar) @@ -197,8 +268,14 @@ def astype(self, dtype, copy=True): return super().astype(dtype, copy=copy) def __arrow_array__(self, type=None): - return pyarrow.array( - self._ndarray, type=type if type is not None else pyarrow.date32(), + """Convert to an Arrow array from dbdate data. 
+ + See: + https://pandas.pydata.org/pandas-docs/stable/development/extending.html#compatibility-with-apache-arrow + """ + array = pyarrow.array(self._ndarray, type=pyarrow.timestamp("ns")) + return pyarrow.compute.cast( + array, type if type is not None else pyarrow.date32(), ) def __add__(self, other): @@ -206,7 +283,7 @@ def __add__(self, other): return self.astype("object") + other if isinstance(other, TimeArray): - return (other._ndarray - other._npepoch) + self._ndarray + return (other._ndarray - _NPEPOCH) + self._ndarray return super().__add__(other) diff --git a/packages/db-dtypes/db_dtypes/core.py b/packages/db-dtypes/db_dtypes/core.py index fbc784efe993..c8f3ad482a00 100644 --- a/packages/db-dtypes/db_dtypes/core.py +++ b/packages/db-dtypes/db_dtypes/core.py @@ -17,6 +17,7 @@ import numpy import pandas from pandas._libs import NaT +import pandas.api.extensions import pandas.compat.numpy.function import pandas.core.algorithms import pandas.core.arrays @@ -32,7 +33,7 @@ pandas_release = pandas_backports.pandas_release -class BaseDatetimeDtype(pandas.core.dtypes.base.ExtensionDtype): +class BaseDatetimeDtype(pandas.api.extensions.ExtensionDtype): na_value = NaT kind = "o" names = None @@ -60,10 +61,7 @@ def __init__(self, values, dtype=None, copy: bool = False): @classmethod def __ndarray(cls, scalars): - return numpy.array( - [None if scalar is None else cls._datetime(scalar) for scalar in scalars], - "M8[ns]", - ) + return numpy.array([cls._datetime(scalar) for scalar in scalars], "M8[ns]",) @classmethod def _from_sequence(cls, scalars, *, dtype=None, copy=False): diff --git a/packages/db-dtypes/tests/unit/test_arrow.py b/packages/db-dtypes/tests/unit/test_arrow.py index d3745ea777ed..5f45a90c1b61 100644 --- a/packages/db-dtypes/tests/unit/test_arrow.py +++ b/packages/db-dtypes/tests/unit/test_arrow.py @@ -13,160 +13,314 @@ # limitations under the License. 
import datetime as dt +from typing import Optional import pandas +import pandas.api.extensions +import pandas.testing import pyarrow import pytest -# To register the types. -import db_dtypes # noqa +import db_dtypes -@pytest.mark.parametrize( - ("series", "expected"), +SECOND_NANOS = 1_000_000_000 +MINUTE_NANOS = 60 * SECOND_NANOS +HOUR_NANOS = 60 * MINUTE_NANOS + + +def types_mapper( + pyarrow_type: pyarrow.DataType, +) -> Optional[pandas.api.extensions.ExtensionDtype]: + type_str = str(pyarrow_type) + + if type_str.startswith("date32") or type_str.startswith("date64"): + return db_dtypes.DateDtype + elif type_str.startswith("time32") or type_str.startswith("time64"): + return db_dtypes.TimeDtype + else: + # Use default type mapping. + return None + + +SERIES_ARRAYS_DEFAULT_TYPES = [ + (pandas.Series([], dtype="dbdate"), pyarrow.array([], type=pyarrow.date32())), ( - (pandas.Series([], dtype="dbdate"), pyarrow.array([], type=pyarrow.date32())), - ( - pandas.Series([None, None, None], dtype="dbdate"), - pyarrow.array([None, None, None], type=pyarrow.date32()), - ), - ( - pandas.Series( - [dt.date(2021, 9, 27), None, dt.date(2011, 9, 27)], dtype="dbdate" - ), - pyarrow.array( - [dt.date(2021, 9, 27), None, dt.date(2011, 9, 27)], - type=pyarrow.date32(), - ), + pandas.Series([None, None, None], dtype="dbdate"), + pyarrow.array([None, None, None], type=pyarrow.date32()), + ), + ( + pandas.Series( + [dt.date(2021, 9, 27), None, dt.date(2011, 9, 27)], dtype="dbdate" ), - ( - pandas.Series( - [dt.date(1677, 9, 22), dt.date(1970, 1, 1), dt.date(2262, 4, 11)], - dtype="dbdate", - ), - pyarrow.array( - [dt.date(1677, 9, 22), dt.date(1970, 1, 1), dt.date(2262, 4, 11)], - type=pyarrow.date32(), - ), + pyarrow.array( + [dt.date(2021, 9, 27), None, dt.date(2011, 9, 27)], type=pyarrow.date32(), ), - ( - pandas.Series([], dtype="dbtime"), - pyarrow.array([], type=pyarrow.time64("ns")), + ), + ( + pandas.Series( + [dt.date(1677, 9, 22), dt.date(1970, 1, 1), dt.date(2262, 4, 11)], 
+ dtype="dbdate", ), - ( - pandas.Series([None, None, None], dtype="dbtime"), - pyarrow.array([None, None, None], type=pyarrow.time64("ns")), + pyarrow.array( + [dt.date(1677, 9, 22), dt.date(1970, 1, 1), dt.date(2262, 4, 11)], + type=pyarrow.date32(), ), - ( - pandas.Series( - [dt.time(0, 0, 0, 0), None, dt.time(23, 59, 59, 999_999)], - dtype="dbtime", - ), - pyarrow.array( - [dt.time(0, 0, 0, 0), None, dt.time(23, 59, 59, 999_999)], - type=pyarrow.time64("ns"), - ), + ), + (pandas.Series([], dtype="dbtime"), pyarrow.array([], type=pyarrow.time64("ns")),), + ( + pandas.Series([None, None, None], dtype="dbtime"), + pyarrow.array([None, None, None], type=pyarrow.time64("ns")), + ), + ( + pandas.Series( + [dt.time(0, 0, 0, 0), None, dt.time(23, 59, 59, 999_999)], dtype="dbtime", ), - ( - pandas.Series( - [ - dt.time(0, 0, 0, 0), - dt.time(12, 30, 15, 125_000), - dt.time(23, 59, 59, 999_999), - ], - dtype="dbtime", - ), - pyarrow.array( - [ - dt.time(0, 0, 0, 0), - dt.time(12, 30, 15, 125_000), - dt.time(23, 59, 59, 999_999), - ], - type=pyarrow.time64("ns"), - ), + pyarrow.array( + [dt.time(0, 0, 0, 0), None, dt.time(23, 59, 59, 999_999)], + type=pyarrow.time64("ns"), ), ), -) + ( + pandas.Series( + [ + dt.time(0, 0, 0, 0), + dt.time(12, 30, 15, 125_000), + dt.time(23, 59, 59, 999_999), + ], + dtype="dbtime", + ), + pyarrow.array( + [ + dt.time(0, 0, 0, 0), + dt.time(12, 30, 15, 125_000), + dt.time(23, 59, 59, 999_999), + ], + type=pyarrow.time64("ns"), + ), + ), +] +SERIES_ARRAYS_CUSTOM_ARROW_TYPES = [ + (pandas.Series([], dtype="dbdate"), pyarrow.array([], type=pyarrow.date64())), + ( + pandas.Series([None, None, None], dtype="dbdate"), + pyarrow.array([None, None, None], type=pyarrow.date64()), + ), + ( + pandas.Series( + [dt.date(2021, 9, 27), None, dt.date(2011, 9, 27)], dtype="dbdate" + ), + pyarrow.array( + [dt.date(2021, 9, 27), None, dt.date(2011, 9, 27)], type=pyarrow.date64(), + ), + ), + ( + pandas.Series( + [dt.date(1677, 9, 22), dt.date(1970, 1, 1), 
dt.date(2262, 4, 11)], + dtype="dbdate", + ), + pyarrow.array( + [dt.date(1677, 9, 22), dt.date(1970, 1, 1), dt.date(2262, 4, 11)], + type=pyarrow.date64(), + ), + ), + (pandas.Series([], dtype="dbtime"), pyarrow.array([], type=pyarrow.time32("ms")),), + ( + pandas.Series([None, None, None], dtype="dbtime"), + pyarrow.array([None, None, None], type=pyarrow.time32("ms")), + ), + ( + pandas.Series( + [dt.time(0, 0, 0, 0), None, dt.time(23, 59, 59, 999_000)], dtype="dbtime", + ), + pyarrow.array( + [dt.time(0, 0, 0, 0), None, dt.time(23, 59, 59, 999_000)], + type=pyarrow.time32("ms"), + ), + ), + ( + pandas.Series( + [ + dt.time(0, 0, 0, 0), + dt.time(12, 30, 15, 125_000), + dt.time(23, 59, 59, 999_000), + ], + dtype="dbtime", + ), + pyarrow.array( + [ + dt.time(0, 0, 0, 0), + dt.time(12, 30, 15, 125_000), + dt.time(23, 59, 59, 999_000), + ], + type=pyarrow.time32("ms"), + ), + ), + ( + pandas.Series( + [dt.time(0, 0, 0, 0), None, dt.time(23, 59, 59, 999_999)], dtype="dbtime", + ), + pyarrow.array( + [dt.time(0, 0, 0, 0), None, dt.time(23, 59, 59, 999_999)], + type=pyarrow.time64("us"), + ), + ), + ( + pandas.Series( + [ + dt.time(0, 0, 0, 0), + dt.time(12, 30, 15, 125_000), + dt.time(23, 59, 59, 999_999), + ], + dtype="dbtime", + ), + pyarrow.array( + [ + dt.time(0, 0, 0, 0), + dt.time(12, 30, 15, 125_000), + dt.time(23, 59, 59, 999_999), + ], + type=pyarrow.time64("us"), + ), + ), + ( + pandas.Series( + [ + # Only microseconds are supported when reading data. See: + # https://github.com/googleapis/python-db-dtypes-pandas/issues/19 + # Still, round-trip with pyarrow nanosecond precision scalars + # is supported. 
+ pyarrow.scalar(0, pyarrow.time64("ns")), + pyarrow.scalar( + 12 * HOUR_NANOS + + 30 * MINUTE_NANOS + + 15 * SECOND_NANOS + + 123_456_789, + pyarrow.time64("ns"), + ), + pyarrow.scalar( + 23 * HOUR_NANOS + + 59 * MINUTE_NANOS + + 59 * SECOND_NANOS + + 999_999_999, + pyarrow.time64("ns"), + ), + ], + dtype="dbtime", + ), + pyarrow.array( + [ + 0, + 12 * HOUR_NANOS + 30 * MINUTE_NANOS + 15 * SECOND_NANOS + 123_456_789, + 23 * HOUR_NANOS + 59 * MINUTE_NANOS + 59 * SECOND_NANOS + 999_999_999, + ], + type=pyarrow.time64("ns"), + ), + ), +] + + +@pytest.mark.parametrize(("series", "expected"), SERIES_ARRAYS_DEFAULT_TYPES) def test_to_arrow(series, expected): array = pyarrow.array(series) assert array.equals(expected) +@pytest.mark.parametrize(("series", "expected"), SERIES_ARRAYS_CUSTOM_ARROW_TYPES) +def test_to_arrow_w_arrow_type(series, expected): + array = pyarrow.array(series, type=expected.type) + assert array.equals(expected) + + @pytest.mark.parametrize( - ("series", "expected"), - ( - (pandas.Series([], dtype="dbdate"), pyarrow.array([], type=pyarrow.date64())), - ( - pandas.Series([None, None, None], dtype="dbdate"), - pyarrow.array([None, None, None], type=pyarrow.date64()), - ), - ( - pandas.Series( - [dt.date(2021, 9, 27), None, dt.date(2011, 9, 27)], dtype="dbdate" - ), - pyarrow.array( - [dt.date(2021, 9, 27), None, dt.date(2011, 9, 27)], - type=pyarrow.date64(), - ), - ), - ( - pandas.Series( - [dt.date(1677, 9, 22), dt.date(1970, 1, 1), dt.date(2262, 4, 11)], - dtype="dbdate", - ), - pyarrow.array( - [dt.date(1677, 9, 22), dt.date(1970, 1, 1), dt.date(2262, 4, 11)], - type=pyarrow.date64(), - ), - ), - ( - pandas.Series([], dtype="dbtime"), - pyarrow.array([], type=pyarrow.time32("ms")), - ), - ( - pandas.Series([None, None, None], dtype="dbtime"), - pyarrow.array([None, None, None], type=pyarrow.time32("ms")), - ), - ( - pandas.Series( - [dt.time(0, 0, 0, 0), None, dt.time(23, 59, 59, 999_000)], - dtype="dbtime", - ), + ["expected", "pyarrow_array"], + 
SERIES_ARRAYS_DEFAULT_TYPES + SERIES_ARRAYS_CUSTOM_ARROW_TYPES, +) +def test_series_from_arrow(pyarrow_array: pyarrow.Array, expected: pandas.Series): + # Convert to RecordBatch because types_mapper argument is ignored when + # using a pyarrow.Array. https://issues.apache.org/jira/browse/ARROW-9664 + record_batch = pyarrow.RecordBatch.from_arrays([pyarrow_array], ["test_col"]) + dataframe = record_batch.to_pandas(date_as_object=False, types_mapper=types_mapper) + series = dataframe["test_col"] + pandas.testing.assert_series_equal(series, expected, check_names=False) + + +@pytest.mark.parametrize( + ["expected", "pyarrow_array"], + SERIES_ARRAYS_DEFAULT_TYPES + SERIES_ARRAYS_CUSTOM_ARROW_TYPES, +) +def test_series_from_arrow_scalars( + pyarrow_array: pyarrow.Array, expected: pandas.Series +): + scalars = [] + for scalar in pyarrow_array: + scalars.append(scalar) + assert isinstance(scalar, pyarrow.Scalar) + series = pandas.Series(scalars, dtype=expected.dtype) + pandas.testing.assert_series_equal(series, expected) + + +def test_dbtime_series_from_arrow_array(): + """Test to explicitly check Array -> Series conversion.""" + array = pyarrow.array([dt.time(15, 21, 0, 123_456)], type=pyarrow.time64("us")) + assert isinstance(array, pyarrow.Array) + assert not isinstance(array, pyarrow.ChunkedArray) + series = pandas.Series(db_dtypes.TimeDtype.__from_arrow__(array)) + expected = pandas.Series([dt.time(15, 21, 0, 123_456)], dtype="dbtime") + pandas.testing.assert_series_equal(series, expected) + + +def test_dbtime_series_from_arrow_chunkedarray(): + """Test to explicitly check ChunkedArray -> Series conversion.""" + array1 = pyarrow.array([dt.time(15, 21, 0, 123_456)], type=pyarrow.time64("us")) + array2 = pyarrow.array([dt.time(0, 0, 0, 0)], type=pyarrow.time64("us")) + array = pyarrow.chunked_array([array1, array2]) + assert isinstance(array, pyarrow.ChunkedArray) + series = pandas.Series(db_dtypes.TimeDtype.__from_arrow__(array)) + expected = pandas.Series( + 
[dt.time(15, 21, 0, 123_456), dt.time(0, 0, 0, 0)], dtype="dbtime" + ) + pandas.testing.assert_series_equal(series, expected) + + +def test_dataframe_from_arrow(): + record_batch = pyarrow.RecordBatch.from_arrays( + [ pyarrow.array( - [dt.time(0, 0, 0, 0), None, dt.time(23, 59, 59, 999_000)], - type=pyarrow.time32("ms"), - ), - ), - ( - pandas.Series( - [dt.time(0, 0, 0, 0), None, dt.time(23, 59, 59, 999_999)], - dtype="dbtime", + [dt.date(2021, 11, 4), dt.date(2038, 1, 20), None, dt.date(1970, 1, 1)], + type=pyarrow.date32(), ), pyarrow.array( - [dt.time(0, 0, 0, 0), None, dt.time(23, 59, 59, 999_999)], - type=pyarrow.time64("us"), - ), - ), - ( - pandas.Series( [ - dt.time(0, 0, 0, 0), - dt.time(12, 30, 15, 125_000), + dt.time(10, 7, 8, 995_325), dt.time(23, 59, 59, 999_999), + None, + dt.time(0, 0, 0, 0), ], - dtype="dbtime", + type=pyarrow.time64("us"), ), - pyarrow.array( + pyarrow.array([1, 2, 3, 4]), + ], + ["date_col", "time_col", "int_col"], + ) + dataframe = record_batch.to_pandas(date_as_object=False, types_mapper=types_mapper) + expected = pandas.DataFrame( + { + "date_col": pandas.Series( + [dt.date(2021, 11, 4), dt.date(2038, 1, 20), None, dt.date(1970, 1, 1)], + dtype="dbdate", + ), + "time_col": pandas.Series( [ - dt.time(0, 0, 0, 0), - dt.time(12, 30, 15, 125_000), + dt.time(10, 7, 8, 995_325), dt.time(23, 59, 59, 999_999), + None, + dt.time(0, 0, 0, 0), ], - type=pyarrow.time64("us"), + dtype="dbtime", ), - ), - ), -) -def test_to_arrow_w_arrow_type(series, expected): - array = pyarrow.array(series, type=expected.type) - assert array.equals(expected) + "int_col": [1, 2, 3, 4], + }, + columns=["date_col", "time_col", "int_col"], + ) + pandas.testing.assert_frame_equal(dataframe, expected) From eac3dbbc950b653e33f24ba3631fe959b55490ad Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 8 Nov 2021 23:00:16 +0000 Subject: [PATCH 028/210] chore: release 0.3.0 (#41) :robot: I have 
created a release \*beep\* \*boop\* --- ## [0.3.0](https://www.github.com/googleapis/python-db-dtypes-pandas/compare/v0.2.0...v0.3.0) (2021-11-08) ### Features * support conversion from pyarrow RecordBatch to pandas DataFrame ([#39](https://www.github.com/googleapis/python-db-dtypes-pandas/issues/39)) ([facc7b0](https://www.github.com/googleapis/python-db-dtypes-pandas/commit/facc7b0897e27c5ba99399b7d453818c5b4aeca7)) * support Python 3.10 ([#40](https://www.github.com/googleapis/python-db-dtypes-pandas/issues/40)) ([a31d55d](https://www.github.com/googleapis/python-db-dtypes-pandas/commit/a31d55db57b2f5655b1fee4230a930d5bee4b1c9)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). --- packages/db-dtypes/CHANGELOG.md | 8 ++++++++ packages/db-dtypes/db_dtypes/version.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/packages/db-dtypes/CHANGELOG.md b/packages/db-dtypes/CHANGELOG.md index 0b78ab38bd5d..3f956b428703 100644 --- a/packages/db-dtypes/CHANGELOG.md +++ b/packages/db-dtypes/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.3.0](https://www.github.com/googleapis/python-db-dtypes-pandas/compare/v0.2.0...v0.3.0) (2021-11-08) + + +### Features + +* support conversion from pyarrow RecordBatch to pandas DataFrame ([#39](https://www.github.com/googleapis/python-db-dtypes-pandas/issues/39)) ([facc7b0](https://www.github.com/googleapis/python-db-dtypes-pandas/commit/facc7b0897e27c5ba99399b7d453818c5b4aeca7)) +* support Python 3.10 ([#40](https://www.github.com/googleapis/python-db-dtypes-pandas/issues/40)) ([a31d55d](https://www.github.com/googleapis/python-db-dtypes-pandas/commit/a31d55db57b2f5655b1fee4230a930d5bee4b1c9)) + ## [0.2.0](https://www.github.com/googleapis/python-db-dtypes-pandas/compare/v0.1.1...v0.2.0) (2021-10-14) diff --git a/packages/db-dtypes/db_dtypes/version.py b/packages/db-dtypes/db_dtypes/version.py 
index 4da46cc4f5da..005815d406cb 100644 --- a/packages/db-dtypes/db_dtypes/version.py +++ b/packages/db-dtypes/db_dtypes/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "0.2.0" +__version__ = "0.3.0" From b9c81ebbc66a2c6c49e5b518c042bcdbcbf1500f Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 11 Nov 2021 13:05:03 -0500 Subject: [PATCH 029/210] chore(python): add .github/CODEOWNERS as a templated file (#44) Source-Link: https://github.com/googleapis/synthtool/commit/c5026b3217973a8db55db8ee85feee0e9a65e295 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:0e18b9475fbeb12d9ad4302283171edebb6baf2dfca1bd215ee3b34ed79d95d7 Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 2 +- packages/db-dtypes/.github/CODEOWNERS | 9 +++++---- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index cb89b2e326b7..7519fa3a2289 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:ec49167c606648a063d1222220b48119c912562849a0528f35bfb592a9f72737 + digest: sha256:0e18b9475fbeb12d9ad4302283171edebb6baf2dfca1bd215ee3b34ed79d95d7 diff --git a/packages/db-dtypes/.github/CODEOWNERS b/packages/db-dtypes/.github/CODEOWNERS index 1473ae0137ab..f8714a3e787d 100644 --- a/packages/db-dtypes/.github/CODEOWNERS +++ b/packages/db-dtypes/.github/CODEOWNERS @@ -3,9 +3,10 @@ # # For syntax help see: # https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax +# Note: This file is autogenerated. To make changes to the codeowner team, please update .repo-metadata.json. 
-# The @googleapis/api-bigquery is the default owner for changes in this repo -* @googleapis/api-bigquery @googleapis/yoshi-python +# @googleapis/yoshi-python @googleapis/api-bigquery are the default owners for changes in this repo +* @googleapis/yoshi-python @googleapis/api-bigquery -# The python-samples-reviewers team is the default owner for samples changes -/samples/ @googleapis/python-samples-owners @googleapis/api-bigquery @googleapis/yoshi-python +# @googleapis/python-samples-owners @googleapis/api-bigquery are the default owners for samples changes +/samples/ @googleapis/python-samples-owners @googleapis/api-bigquery From 077cfc53ed950a39f23b780cee2f94820d9de37b Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Sat, 4 Dec 2021 14:46:06 -0600 Subject: [PATCH 030/210] fix: raise ValueError if date is out-of-bounds (#46) * fix: raise ValueError if date is out-of-bounds * unify _datetime return type * add relevant unit test --- packages/db-dtypes/db_dtypes/__init__.py | 50 ++++++++++++--------- packages/db-dtypes/db_dtypes/core.py | 4 +- packages/db-dtypes/tests/unit/test_arrow.py | 25 ++++++++--- packages/db-dtypes/tests/unit/test_date.py | 5 +++ 4 files changed, 56 insertions(+), 28 deletions(-) diff --git a/packages/db-dtypes/db_dtypes/__init__.py b/packages/db-dtypes/db_dtypes/__init__.py index f1424fb60d67..056be28fef7f 100644 --- a/packages/db-dtypes/db_dtypes/__init__.py +++ b/packages/db-dtypes/db_dtypes/__init__.py @@ -17,7 +17,7 @@ import datetime import re -from typing import Union +from typing import Optional, Union import numpy import packaging.version @@ -103,7 +103,7 @@ def _datetime( r"(?::(?P\d+)" r"(?:\.(?P\d*))?)?)?\s*$" ).match, - ): + ) -> Optional[numpy.datetime64]: # Convert pyarrow values to datetime.time. 
if isinstance(scalar, (pyarrow.Time32Scalar, pyarrow.Time64Scalar)): scalar = ( @@ -115,8 +115,16 @@ def _datetime( if scalar is None: return None - elif isinstance(scalar, datetime.time): - return datetime.datetime.combine(_EPOCH, scalar) + if isinstance(scalar, datetime.time): + return pandas.Timestamp( + year=1970, + month=1, + day=1, + hour=scalar.hour, + minute=scalar.minute, + second=scalar.second, + microsecond=scalar.microsecond, + ).to_datetime64() elif isinstance(scalar, pandas.Timestamp): return scalar.to_datetime64() elif isinstance(scalar, str): @@ -125,20 +133,20 @@ def _datetime( if not parsed: raise ValueError(f"Bad time string: {repr(scalar)}") - hours = parsed.group("hours") - minutes = parsed.group("minutes") - seconds = parsed.group("seconds") + hour = parsed.group("hours") + minute = parsed.group("minutes") + second = parsed.group("seconds") fraction = parsed.group("fraction") - microseconds = int(fraction.ljust(6, "0")[:6]) if fraction else 0 - return datetime.datetime( - 1970, - 1, - 1, - int(hours), - int(minutes) if minutes else 0, - int(seconds) if seconds else 0, - microseconds, - ) + nanosecond = int(fraction.ljust(9, "0")[:9]) if fraction else 0 + return pandas.Timestamp( + year=1970, + month=1, + day=1, + hour=int(hour), + minute=int(minute) if minute else 0, + second=int(second) if second else 0, + nanosecond=nanosecond, + ).to_datetime64() else: raise TypeError("Invalid value type", scalar) @@ -225,7 +233,7 @@ class DateArray(core.BaseDatetimeArray): def _datetime( scalar, match_fn=re.compile(r"\s*(?P\d+)-(?P\d+)-(?P\d+)\s*$").match, - ): + ) -> Optional[numpy.datetime64]: # Convert pyarrow values to datetime.date. 
if isinstance(scalar, (pyarrow.Date32Scalar, pyarrow.Date64Scalar)): scalar = scalar.as_py() @@ -233,7 +241,9 @@ def _datetime( if scalar is None: return None elif isinstance(scalar, datetime.date): - return datetime.datetime(scalar.year, scalar.month, scalar.day) + return pandas.Timestamp( + year=scalar.year, month=scalar.month, day=scalar.day + ).to_datetime64() elif isinstance(scalar, str): match = match_fn(scalar) if not match: @@ -241,7 +251,7 @@ def _datetime( year = int(match.group("year")) month = int(match.group("month")) day = int(match.group("day")) - return datetime.datetime(year, month, day) + return pandas.Timestamp(year=year, month=month, day=day).to_datetime64() else: raise TypeError("Invalid value type", scalar) diff --git a/packages/db-dtypes/db_dtypes/core.py b/packages/db-dtypes/db_dtypes/core.py index c8f3ad482a00..3ade1988e68b 100644 --- a/packages/db-dtypes/db_dtypes/core.py +++ b/packages/db-dtypes/db_dtypes/core.py @@ -127,9 +127,7 @@ def take( if allow_fill: fill_value = self._validate_scalar(fill_value) fill_value = ( - numpy.datetime64() - if fill_value is None - else numpy.datetime64(self._datetime(fill_value)) + numpy.datetime64() if fill_value is None else self._datetime(fill_value) ) if (indices < -1).any(): raise ValueError( diff --git a/packages/db-dtypes/tests/unit/test_arrow.py b/packages/db-dtypes/tests/unit/test_arrow.py index 5f45a90c1b61..4d4fc50baecf 100644 --- a/packages/db-dtypes/tests/unit/test_arrow.py +++ b/packages/db-dtypes/tests/unit/test_arrow.py @@ -183,13 +183,13 @@ def types_mapper( type=pyarrow.time64("us"), ), ), - ( + # Only microseconds are supported when reading data. See: + # https://github.com/googleapis/python-db-dtypes-pandas/issues/19 + # Still, round-trip with pyarrow nanosecond precision scalars + # is supported. + pytest.param( pandas.Series( [ - # Only microseconds are supported when reading data. 
See: - # https://github.com/googleapis/python-db-dtypes-pandas/issues/19 - # Still, round-trip with pyarrow nanosecond precision scalars - # is supported. pyarrow.scalar(0, pyarrow.time64("ns")), pyarrow.scalar( 12 * HOUR_NANOS @@ -216,6 +216,21 @@ def types_mapper( ], type=pyarrow.time64("ns"), ), + id="time-nanoseconds-arrow-round-trip", + ), + pytest.param( + pandas.Series( + ["0:0:0", "12:30:15.123456789", "23:59:59.999999999"], dtype="dbtime", + ), + pyarrow.array( + [ + 0, + 12 * HOUR_NANOS + 30 * MINUTE_NANOS + 15 * SECOND_NANOS + 123_456_789, + 23 * HOUR_NANOS + 59 * MINUTE_NANOS + 59 * SECOND_NANOS + 999_999_999, + ], + type=pyarrow.time64("ns"), + ), + id="time-nanoseconds-arrow-from-string", ), ] diff --git a/packages/db-dtypes/tests/unit/test_date.py b/packages/db-dtypes/tests/unit/test_date.py index c919f6d7250f..b906f245c52c 100644 --- a/packages/db-dtypes/tests/unit/test_date.py +++ b/packages/db-dtypes/tests/unit/test_date.py @@ -55,6 +55,11 @@ def test_date_parsing(value, expected): ("2021-2-99", "day is out of range for month"), ("2021-99-1", "month must be in 1[.][.]12"), ("10000-1-1", "year 10000 is out of range"), + # Outside of min/max values pandas.Timestamp. 
+ ("0001-01-01", "Out of bounds"), + ("9999-12-31", "Out of bounds"), + ("1677-09-21", "Out of bounds"), + ("2262-04-12", "Out of bounds"), ], ) def test_date_parsing_errors(value, error): From 58f88559fb5a638df72277047c7dca6ba5a3deed Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 6 Dec 2021 09:25:31 -0600 Subject: [PATCH 031/210] chore: release 0.3.1 (#48) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/db-dtypes/CHANGELOG.md | 7 +++++++ packages/db-dtypes/db_dtypes/version.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/db-dtypes/CHANGELOG.md b/packages/db-dtypes/CHANGELOG.md index 3f956b428703..0298e79d02c8 100644 --- a/packages/db-dtypes/CHANGELOG.md +++ b/packages/db-dtypes/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.3.1](https://www.github.com/googleapis/python-db-dtypes-pandas/compare/v0.3.0...v0.3.1) (2021-12-04) + + +### Bug Fixes + +* raise ValueError if date is out-of-bounds ([#46](https://www.github.com/googleapis/python-db-dtypes-pandas/issues/46)) ([4253358](https://www.github.com/googleapis/python-db-dtypes-pandas/commit/4253358b673965f7d2823b750f56553f6627e130)) + ## [0.3.0](https://www.github.com/googleapis/python-db-dtypes-pandas/compare/v0.2.0...v0.3.0) (2021-11-08) diff --git a/packages/db-dtypes/db_dtypes/version.py b/packages/db-dtypes/db_dtypes/version.py index 005815d406cb..b118f0850400 100644 --- a/packages/db-dtypes/db_dtypes/version.py +++ b/packages/db-dtypes/db_dtypes/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "0.3.0" +__version__ = "0.3.1" From 50720d73eb17b1a27e60a82f406ff962d743eba7 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 30 Dec 2021 17:51:40 -0500 Subject: [PATCH 032/210] chore: update release_level in repo-metadata.json (#51) * chore: update .repo-metadata.json * revert * remove api_shortname --- packages/db-dtypes/.repo-metadata.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/db-dtypes/.repo-metadata.json b/packages/db-dtypes/.repo-metadata.json index 0f8ca37ea22a..176e5d9de749 100644 --- a/packages/db-dtypes/.repo-metadata.json +++ b/packages/db-dtypes/.repo-metadata.json @@ -2,7 +2,7 @@ "name": "db-dtypes", "name_pretty": "Pandas Data Types for SQL systems (BigQuery, Spanner)", "client_documentation": "https://googleapis.dev/python/db-dtypes/latest/index.html", - "release_level": "beta", + "release_level": "preview", "language": "python", "library_type": "INTEGRATION", "repo": "googleapis/python-db-dtypes-pandas", From f27387ab4d23d594c845351624bc035c8dcfbca4 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 6 Jan 2022 10:51:36 -0700 Subject: [PATCH 033/210] chore: use python-samples-reviewers (#52) Source-Link: https://github.com/googleapis/synthtool/commit/da9308710160980198d85a4bcddac1d6f6f1a5bc Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:899d5d7cc340fa8ef9d8ae1a8cfba362c6898584f779e156f25ee828ba824610 Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 2 +- packages/db-dtypes/.github/CODEOWNERS | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 7519fa3a2289..f33299ddbbab 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: 
gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:0e18b9475fbeb12d9ad4302283171edebb6baf2dfca1bd215ee3b34ed79d95d7 + digest: sha256:899d5d7cc340fa8ef9d8ae1a8cfba362c6898584f779e156f25ee828ba824610 diff --git a/packages/db-dtypes/.github/CODEOWNERS b/packages/db-dtypes/.github/CODEOWNERS index f8714a3e787d..193b4363d07e 100644 --- a/packages/db-dtypes/.github/CODEOWNERS +++ b/packages/db-dtypes/.github/CODEOWNERS @@ -8,5 +8,5 @@ # @googleapis/yoshi-python @googleapis/api-bigquery are the default owners for changes in this repo * @googleapis/yoshi-python @googleapis/api-bigquery -# @googleapis/python-samples-owners @googleapis/api-bigquery are the default owners for samples changes -/samples/ @googleapis/python-samples-owners @googleapis/api-bigquery +# @googleapis/python-samples-reviewers @googleapis/api-bigquery are the default owners for samples changes +/samples/ @googleapis/python-samples-reviewers @googleapis/api-bigquery From 7b3e2de02c7f0f5441b38279a04490dad9464c1d Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 11 Jan 2022 07:36:02 -0500 Subject: [PATCH 034/210] chore(samples): Add check for tests in directory (#54) Source-Link: https://github.com/googleapis/synthtool/commit/52aef91f8d25223d9dbdb4aebd94ba8eea2101f3 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:36a95b8f494e4674dc9eee9af98961293b51b86b3649942aac800ae6c1f796d4 Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 2 +- .../db-dtypes/samples/snippets/noxfile.py | 70 +++++++++++-------- 2 files changed, 40 insertions(+), 32 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index f33299ddbbab..6b8a73b31465 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: 
gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:899d5d7cc340fa8ef9d8ae1a8cfba362c6898584f779e156f25ee828ba824610 + digest: sha256:36a95b8f494e4674dc9eee9af98961293b51b86b3649942aac800ae6c1f796d4 diff --git a/packages/db-dtypes/samples/snippets/noxfile.py b/packages/db-dtypes/samples/snippets/noxfile.py index 93a9122cc457..3bbef5d54f44 100644 --- a/packages/db-dtypes/samples/snippets/noxfile.py +++ b/packages/db-dtypes/samples/snippets/noxfile.py @@ -14,6 +14,7 @@ from __future__ import print_function +import glob import os from pathlib import Path import sys @@ -184,37 +185,44 @@ def blacken(session: nox.sessions.Session) -> None: def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") - else: - session.install("-r", "requirements-test.txt") - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. 
- # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) + # check for presence of tests + test_list = glob.glob("*_test.py") + glob.glob("test_*.py") + if len(test_list) == 0: + print("No tests found, skipping directory.") + else: + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. 
+ # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) @nox.session(python=ALL_VERSIONS) From 1c43e61170e1eb66de3d3085018a9c832d97f953 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 14 Jan 2022 21:42:12 +0000 Subject: [PATCH 035/210] chore(python): update release.sh to use keystore (#56) build: switch to release-please for tagging --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 2 +- packages/db-dtypes/.github/release-please.yml | 1 + packages/db-dtypes/.github/release-trigger.yml | 1 + packages/db-dtypes/.kokoro/release.sh | 2 +- packages/db-dtypes/.kokoro/release/common.cfg | 12 +++++++++++- 5 files changed, 15 insertions(+), 3 deletions(-) create mode 100644 packages/db-dtypes/.github/release-trigger.yml diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 6b8a73b31465..eecb84c21b27 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:36a95b8f494e4674dc9eee9af98961293b51b86b3649942aac800ae6c1f796d4 + digest: sha256:ae600f36b6bc972b368367b6f83a1d91ec2c82a4a116b383d67d547c56fe6de3 diff --git a/packages/db-dtypes/.github/release-please.yml b/packages/db-dtypes/.github/release-please.yml index 4507ad0598a5..466597e5b196 100644 --- a/packages/db-dtypes/.github/release-please.yml +++ b/packages/db-dtypes/.github/release-please.yml @@ -1 +1,2 @@ releaseType: python +handleGHRelease: true diff --git a/packages/db-dtypes/.github/release-trigger.yml b/packages/db-dtypes/.github/release-trigger.yml new file mode 100644 index 000000000000..d4ca94189e16 --- /dev/null +++ b/packages/db-dtypes/.github/release-trigger.yml @@ -0,0 +1 @@ +enabled: true diff --git a/packages/db-dtypes/.kokoro/release.sh 
b/packages/db-dtypes/.kokoro/release.sh index 297fec580330..aa6052b514ee 100755 --- a/packages/db-dtypes/.kokoro/release.sh +++ b/packages/db-dtypes/.kokoro/release.sh @@ -26,7 +26,7 @@ python3 -m pip install --upgrade twine wheel setuptools export PYTHONUNBUFFERED=1 # Move into the package, build the distribution and upload. -TWINE_PASSWORD=$(cat "${KOKORO_GFILE_DIR}/secret_manager/google-cloud-pypi-token") +TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-1") cd github/python-db-dtypes-pandas python3 setup.py sdist bdist_wheel twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/packages/db-dtypes/.kokoro/release/common.cfg b/packages/db-dtypes/.kokoro/release/common.cfg index 5917933f727e..44cc38429040 100644 --- a/packages/db-dtypes/.kokoro/release/common.cfg +++ b/packages/db-dtypes/.kokoro/release/common.cfg @@ -23,8 +23,18 @@ env_vars: { value: "github/python-db-dtypes-pandas/.kokoro/release.sh" } +# Fetch PyPI password +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "google-cloud-pypi-token-keystore-1" + } + } +} + # Tokens needed to report release status back to GitHub env_vars: { key: "SECRET_MANAGER_KEYS" - value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem,google-cloud-pypi-token" + value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" } From e4a5668a7b0572ff3459b8063bec2b35342d37ef Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 18 Jan 2022 20:25:26 -0500 Subject: [PATCH 036/210] chore(python): Noxfile recognizes that tests can live in a folder (#58) Source-Link: https://github.com/googleapis/synthtool/commit/4760d8dce1351d93658cb11d02a1b7ceb23ae5d7 Post-Processor: 
gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:f0e4b51deef56bed74d3e2359c583fc104a8d6367da3984fc5c66938db738828 Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 2 +- packages/db-dtypes/samples/snippets/noxfile.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index eecb84c21b27..52d79c11f3ad 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:ae600f36b6bc972b368367b6f83a1d91ec2c82a4a116b383d67d547c56fe6de3 + digest: sha256:f0e4b51deef56bed74d3e2359c583fc104a8d6367da3984fc5c66938db738828 diff --git a/packages/db-dtypes/samples/snippets/noxfile.py b/packages/db-dtypes/samples/snippets/noxfile.py index 3bbef5d54f44..20cdfc620138 100644 --- a/packages/db-dtypes/samples/snippets/noxfile.py +++ b/packages/db-dtypes/samples/snippets/noxfile.py @@ -187,6 +187,7 @@ def _session_tests( ) -> None: # check for presence of tests test_list = glob.glob("*_test.py") + glob.glob("test_*.py") + test_list.extend(glob.glob("tests")) if len(test_list) == 0: print("No tests found, skipping directory.") else: From 365fc9be502f228e4da1b187d4f7851dfd3c093a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 21 Jan 2022 07:31:11 -0500 Subject: [PATCH 037/210] ci(python): run lint / unit tests / docs as GH actions (#59) * ci(python): run lint / unit tests / docs as GH actions Source-Link: https://github.com/googleapis/synthtool/commit/57be0cdb0b94e1669cee0ca38d790de1dfdbcd44 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:ed1f9983d5a935a89fe8085e8bb97d94e41015252c5b6c9771257cf8624367e6 * add commit to trigger gh actions Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- 
packages/db-dtypes/.github/.OwlBot.lock.yaml | 16 +++++- packages/db-dtypes/.github/workflows/docs.yml | 38 +++++++++++++ packages/db-dtypes/.github/workflows/lint.yml | 25 ++++++++ .../db-dtypes/.github/workflows/unittest.yml | 57 +++++++++++++++++++ 4 files changed, 135 insertions(+), 1 deletion(-) create mode 100644 packages/db-dtypes/.github/workflows/docs.yml create mode 100644 packages/db-dtypes/.github/workflows/lint.yml create mode 100644 packages/db-dtypes/.github/workflows/unittest.yml diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 52d79c11f3ad..b668c04d5d65 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -1,3 +1,17 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:f0e4b51deef56bed74d3e2359c583fc104a8d6367da3984fc5c66938db738828 + digest: sha256:ed1f9983d5a935a89fe8085e8bb97d94e41015252c5b6c9771257cf8624367e6 + diff --git a/packages/db-dtypes/.github/workflows/docs.yml b/packages/db-dtypes/.github/workflows/docs.yml new file mode 100644 index 000000000000..f7b8344c4500 --- /dev/null +++ b/packages/db-dtypes/.github/workflows/docs.yml @@ -0,0 +1,38 @@ +on: + pull_request: + branches: + - main +name: docs +jobs: + docs: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: "3.10" + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run docs + run: | + nox -s docs + docfx: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: "3.10" + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run docfx + run: | + nox -s docfx diff --git a/packages/db-dtypes/.github/workflows/lint.yml b/packages/db-dtypes/.github/workflows/lint.yml new file mode 100644 index 000000000000..1e8b05c3d7ff --- /dev/null +++ b/packages/db-dtypes/.github/workflows/lint.yml @@ -0,0 +1,25 @@ +on: + pull_request: + branches: + - main +name: lint +jobs: + lint: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: "3.10" + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run lint + run: | + nox -s lint + - name: Run lint_setup_py + run: | + nox -s lint_setup_py diff --git a/packages/db-dtypes/.github/workflows/unittest.yml 
b/packages/db-dtypes/.github/workflows/unittest.yml new file mode 100644 index 000000000000..074ee2504ca5 --- /dev/null +++ b/packages/db-dtypes/.github/workflows/unittest.yml @@ -0,0 +1,57 @@ +on: + pull_request: + branches: + - main +name: unittest +jobs: + unit: + runs-on: ubuntu-latest + strategy: + matrix: + python: ['3.6', '3.7', '3.8', '3.9', '3.10'] + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python }} + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run unit tests + env: + COVERAGE_FILE: .coverage-${{ matrix.python }} + run: | + nox -s unit-${{ matrix.python }} + - name: Upload coverage results + uses: actions/upload-artifact@v2 + with: + name: coverage-artifacts + path: .coverage-${{ matrix.python }} + + cover: + runs-on: ubuntu-latest + needs: + - unit + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: "3.10" + - name: Install coverage + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install coverage + - name: Download coverage results + uses: actions/download-artifact@v2 + with: + name: coverage-artifacts + path: .coverage-results/ + - name: Report coverage results + run: | + coverage combine .coverage-results/.coverage* + coverage report --show-missing --fail-under=100 From 00fb37c3cb51b873b1583482dec1d32062bc18ac Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Tue, 25 Jan 2022 15:42:25 -0600 Subject: [PATCH 038/210] chore: temporarily add custom repo settings (#61) --- .../db-dtypes/.github/sync-repo-settings.yaml | 31 +++++++++++++++++++ 1 file changed, 31 insertions(+) create mode 100644 packages/db-dtypes/.github/sync-repo-settings.yaml diff --git a/packages/db-dtypes/.github/sync-repo-settings.yaml b/packages/db-dtypes/.github/sync-repo-settings.yaml new 
file mode 100644 index 000000000000..ebe13aa89894 --- /dev/null +++ b/packages/db-dtypes/.github/sync-repo-settings.yaml @@ -0,0 +1,31 @@ +# https://github.com/googleapis/repo-automation-bots/tree/main/packages/sync-repo-settings +# Rules for main branch protection +branchProtectionRules: +# Identifies the protection rule pattern. Name of the branch to be protected. +# Defaults to `main` +- pattern: main + requiresCodeOwnerReviews: true + requiresStrictStatusChecks: true + requiredStatusCheckContexts: + - 'cla/google' + - 'OwlBot Post Processor' + - 'docs' + - 'docfx' + - 'lint' + - 'unit (3.6)' + - 'unit (3.7)' + - 'unit (3.8)' + - 'unit (3.9)' + - 'unit (3.10)' + - 'cover' +permissionRules: + - team: actools-python + permission: admin + - team: actools + permission: admin + - team: yoshi-python + permission: push + - team: python-samples-owners + permission: push + - team: python-samples-reviewers + permission: push From b08bbe7ee4a8060d230475d1a9d56474f8544166 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Wed, 26 Jan 2022 09:17:21 -0600 Subject: [PATCH 039/210] fix: use public pandas APIs where possible (#60) * refactor: use public pandas APIs where possible * no need to override take * backport take implementation * move remaining private pandas methods to backports * add note about _validate_scalar to docstring * comment why we can't use public mixin --- packages/db-dtypes/db_dtypes/__init__.py | 16 ++-- packages/db-dtypes/db_dtypes/core.py | 80 +++++-------------- .../db-dtypes/db_dtypes/pandas_backports.py | 47 ++++++++++- 3 files changed, 69 insertions(+), 74 deletions(-) diff --git a/packages/db-dtypes/db_dtypes/__init__.py b/packages/db-dtypes/db_dtypes/__init__.py index 056be28fef7f..a518a0bc179a 100644 --- a/packages/db-dtypes/db_dtypes/__init__.py +++ b/packages/db-dtypes/db_dtypes/__init__.py @@ -22,13 +22,7 @@ import numpy import packaging.version import pandas -import pandas.compat.numpy.function -import pandas.core.algorithms -import 
pandas.core.arrays -import pandas.core.dtypes.base -import pandas.core.dtypes.dtypes -import pandas.core.dtypes.generic -import pandas.core.nanops +import pandas.api.extensions import pyarrow import pyarrow.compute @@ -44,7 +38,7 @@ pandas_release = packaging.version.parse(pandas.__version__).release -@pandas.core.dtypes.dtypes.register_extension_dtype +@pandas.api.extensions.register_extension_dtype class TimeDtype(core.BaseDatetimeDtype): """ Extension dtype for time data. @@ -113,7 +107,7 @@ def _datetime( .as_py() ) - if scalar is None: + if pandas.isna(scalar): return None if isinstance(scalar, datetime.time): return pandas.Timestamp( @@ -194,7 +188,7 @@ def __arrow_array__(self, type=None): ) -@pandas.core.dtypes.dtypes.register_extension_dtype +@pandas.api.extensions.register_extension_dtype class DateDtype(core.BaseDatetimeDtype): """ Extension dtype for time data. @@ -238,7 +232,7 @@ def _datetime( if isinstance(scalar, (pyarrow.Date32Scalar, pyarrow.Date64Scalar)): scalar = scalar.as_py() - if scalar is None: + if pandas.isna(scalar): return None elif isinstance(scalar, datetime.date): return pandas.Timestamp( diff --git a/packages/db-dtypes/db_dtypes/core.py b/packages/db-dtypes/db_dtypes/core.py index 3ade1988e68b..05daf37dc4f7 100644 --- a/packages/db-dtypes/db_dtypes/core.py +++ b/packages/db-dtypes/db_dtypes/core.py @@ -12,20 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import Any, Optional, Sequence +from typing import Optional import numpy import pandas -from pandas._libs import NaT +from pandas import NaT import pandas.api.extensions -import pandas.compat.numpy.function -import pandas.core.algorithms -import pandas.core.arrays -import pandas.core.dtypes.base -from pandas.core.dtypes.common import is_dtype_equal, is_list_like, pandas_dtype -import pandas.core.dtypes.dtypes -import pandas.core.dtypes.generic -import pandas.core.nanops +from pandas.api.types import is_dtype_equal, is_list_like, pandas_dtype from db_dtypes import pandas_backports @@ -107,42 +100,11 @@ def isna(self): return pandas.isna(self._ndarray) def _validate_scalar(self, value): - if pandas.isna(value): - return None - - if not isinstance(value, self.dtype.type): - raise ValueError(value) - - return value - - def take( - self, - indices: Sequence[int], - *, - allow_fill: bool = False, - fill_value: Any = None, - ): - indices = numpy.asarray(indices, dtype=numpy.intp) - data = self._ndarray - if allow_fill: - fill_value = self._validate_scalar(fill_value) - fill_value = ( - numpy.datetime64() if fill_value is None else self._datetime(fill_value) - ) - if (indices < -1).any(): - raise ValueError( - "take called with negative indexes other than -1," - " when a fill value is provided." - ) - out = data.take(indices) - if allow_fill: - out[indices == -1] = fill_value - - return self.__class__(out) - - # TODO: provide implementations of dropna, fillna, unique, - # factorize, argsort, searchsoeted for better performance over - # abstract implementations. + """ + Validate and convert a scalar value to datetime64[ns] for storage in + backing NumPy array. 
+ """ + return self._datetime(value) def any( self, @@ -152,10 +114,8 @@ def any( keepdims: bool = False, skipna: bool = True, ): - pandas.compat.numpy.function.validate_any( - (), {"out": out, "keepdims": keepdims} - ) - result = pandas.core.nanops.nanany(self._ndarray, axis=axis, skipna=skipna) + pandas_backports.numpy_validate_any((), {"out": out, "keepdims": keepdims}) + result = pandas_backports.nanany(self._ndarray, axis=axis, skipna=skipna) return result def all( @@ -166,22 +126,20 @@ def all( keepdims: bool = False, skipna: bool = True, ): - pandas.compat.numpy.function.validate_all( - (), {"out": out, "keepdims": keepdims} - ) - result = pandas.core.nanops.nanall(self._ndarray, axis=axis, skipna=skipna) + pandas_backports.numpy_validate_all((), {"out": out, "keepdims": keepdims}) + result = pandas_backports.nanall(self._ndarray, axis=axis, skipna=skipna) return result def min(self, *, axis: Optional[int] = None, skipna: bool = True, **kwargs): - pandas.compat.numpy.function.validate_min((), kwargs) - result = pandas.core.nanops.nanmin( + pandas_backports.numpy_validate_min((), kwargs) + result = pandas_backports.nanmin( values=self._ndarray, axis=axis, mask=self.isna(), skipna=skipna ) return self._box_func(result) def max(self, *, axis: Optional[int] = None, skipna: bool = True, **kwargs): - pandas.compat.numpy.function.validate_max((), kwargs) - result = pandas.core.nanops.nanmax( + pandas_backports.numpy_validate_max((), kwargs) + result = pandas_backports.nanmax( values=self._ndarray, axis=axis, mask=self.isna(), skipna=skipna ) return self._box_func(result) @@ -197,11 +155,9 @@ def median( keepdims: bool = False, skipna: bool = True, ): - pandas.compat.numpy.function.validate_median( + pandas_backports.numpy_validate_median( (), {"out": out, "overwrite_input": overwrite_input, "keepdims": keepdims}, ) - result = pandas.core.nanops.nanmedian( - self._ndarray, axis=axis, skipna=skipna - ) + result = pandas_backports.nanmedian(self._ndarray, axis=axis, 
skipna=skipna) return self._box_func(result) diff --git a/packages/db-dtypes/db_dtypes/pandas_backports.py b/packages/db-dtypes/db_dtypes/pandas_backports.py index 003224f34536..4b733cc6d1d3 100644 --- a/packages/db-dtypes/db_dtypes/pandas_backports.py +++ b/packages/db-dtypes/db_dtypes/pandas_backports.py @@ -20,15 +20,32 @@ """ import operator +from typing import Any import numpy import packaging.version import pandas -from pandas._libs.lib import is_integer +from pandas.api.types import is_integer +import pandas.compat.numpy.function +import pandas.core.nanops pandas_release = packaging.version.parse(pandas.__version__).release +# Create aliases for private methods in case they move in a future version. +nanall = pandas.core.nanops.nanall +nanany = pandas.core.nanops.nanany +nanmax = pandas.core.nanops.nanmax +nanmin = pandas.core.nanops.nanmin +numpy_validate_all = pandas.compat.numpy.function.validate_all +numpy_validate_any = pandas.compat.numpy.function.validate_any +numpy_validate_max = pandas.compat.numpy.function.validate_max +numpy_validate_min = pandas.compat.numpy.function.validate_min + +if pandas_release >= (1, 2): + nanmedian = pandas.core.nanops.nanmedian + numpy_validate_median = pandas.compat.numpy.function.validate_median + def import_default(module_name, force=False, default=None): """ @@ -55,6 +72,10 @@ def import_default(module_name, force=False, default=None): return getattr(module, name, default) +# pandas.core.arraylike.OpsMixin is private, but the related public API +# "ExtensionScalarOpsMixin" is not sufficient for adding dates to times. +# It results in unsupported operand type(s) for +: 'datetime.time' and +# 'datetime.date' @import_default("pandas.core.arraylike") class OpsMixin: def _cmp_method(self, other, op): # pragma: NO COVER @@ -81,6 +102,8 @@ def __ge__(self, other): __add__ = __radd__ = __sub__ = lambda self, other: NotImplemented +# TODO: use public API once pandas 1.5 / 2.x is released. 
+# See: https://github.com/pandas-dev/pandas/pull/45544 @import_default("pandas.core.arrays._mixins", pandas_release < (1, 3)) class NDArrayBackedExtensionArray(pandas.core.arrays.base.ExtensionArray): @@ -130,6 +153,28 @@ def copy(self): def repeat(self, n): return self.__class__(self._ndarray.repeat(n), self._dtype) + def take( + self, + indices, + *, + allow_fill: bool = False, + fill_value: Any = None, + axis: int = 0, + ): + from pandas.core.algorithms import take + + if allow_fill: + fill_value = self._validate_scalar(fill_value) + + new_data = take( + self._ndarray, + indices, + allow_fill=allow_fill, + fill_value=fill_value, + axis=axis, + ) + return self._from_backing_data(new_data) + @classmethod def _concat_same_type(cls, to_concat, axis=0): dtypes = {str(x.dtype) for x in to_concat} From cf8303b76b83b584edb99a36f2223b34a78c090d Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Wed, 2 Feb 2022 16:17:42 -0600 Subject: [PATCH 040/210] fix!: use `pandas.NaT` for missing values in dbdate and dbtime dtypes (#67) * fix!: use `pandas.NaT` for missing values in dbdate and dbtime dtypes This makes them consistent with other date/time dtypes, as well as internally consistent with the advertised `dtype.na_value`. 
* adjust pandas version support for median BREAKING-CHANGE: dbdate and dbtime dtypes return NaT instead of None for missing values Release-As: 0.4.0 --- packages/db-dtypes/db_dtypes/__init__.py | 8 +- packages/db-dtypes/db_dtypes/core.py | 5 +- .../db-dtypes/db_dtypes/pandas_backports.py | 2 +- .../db-dtypes/testing/constraints-3.9.txt | 3 +- packages/db-dtypes/tests/unit/test_date.py | 27 +++++ packages/db-dtypes/tests/unit/test_dtypes.py | 112 ++++++++++-------- packages/db-dtypes/tests/unit/test_time.py | 30 +++++ 7 files changed, 127 insertions(+), 60 deletions(-) diff --git a/packages/db-dtypes/db_dtypes/__init__.py b/packages/db-dtypes/db_dtypes/__init__.py index a518a0bc179a..a222e6dee1ba 100644 --- a/packages/db-dtypes/db_dtypes/__init__.py +++ b/packages/db-dtypes/db_dtypes/__init__.py @@ -145,8 +145,8 @@ def _datetime( raise TypeError("Invalid value type", scalar) def _box_func(self, x): - if pandas.isnull(x): - return None + if pandas.isna(x): + return pandas.NaT try: return x.astype("= (1, 2): +if pandas_release >= (1, 3): nanmedian = pandas.core.nanops.nanmedian numpy_validate_median = pandas.compat.numpy.function.validate_median diff --git a/packages/db-dtypes/testing/constraints-3.9.txt b/packages/db-dtypes/testing/constraints-3.9.txt index eebb9da6fc72..d814dcd4924a 100644 --- a/packages/db-dtypes/testing/constraints-3.9.txt +++ b/packages/db-dtypes/testing/constraints-3.9.txt @@ -1 +1,2 @@ -sqlalchemy>=1.4.13 +# Make sure we test with pandas 1.3.0. The Python version isn't that relevant. +pandas==1.3.0 diff --git a/packages/db-dtypes/tests/unit/test_date.py b/packages/db-dtypes/tests/unit/test_date.py index b906f245c52c..bf877eac31ed 100644 --- a/packages/db-dtypes/tests/unit/test_date.py +++ b/packages/db-dtypes/tests/unit/test_date.py @@ -19,6 +19,7 @@ # To register the types. 
import db_dtypes # noqa +from db_dtypes import pandas_backports @pytest.mark.parametrize( @@ -65,3 +66,29 @@ def test_date_parsing(value, expected): def test_date_parsing_errors(value, error): with pytest.raises(ValueError, match=error): pandas.Series([value], dtype="dbdate") + + +@pytest.mark.skipif( + not hasattr(pandas_backports, "numpy_validate_median"), + reason="median not available with this version of pandas", +) +@pytest.mark.parametrize( + "values, expected", + [ + (["1970-01-01", "1900-01-01", "2000-01-01"], datetime.date(1970, 1, 1)), + ( + [ + None, + "1900-01-01", + pandas.NA if hasattr(pandas, "NA") else None, + pandas.NaT, + float("nan"), + ], + datetime.date(1900, 1, 1), + ), + (["2222-02-01", "2222-02-03"], datetime.date(2222, 2, 2)), + ], +) +def test_date_median(values, expected): + series = pandas.Series(values, dtype="dbdate") + assert series.median() == expected diff --git a/packages/db-dtypes/tests/unit/test_dtypes.py b/packages/db-dtypes/tests/unit/test_dtypes.py index aacbf0b464c9..66074d8eb936 100644 --- a/packages/db-dtypes/tests/unit/test_dtypes.py +++ b/packages/db-dtypes/tests/unit/test_dtypes.py @@ -23,8 +23,8 @@ pandas_release = packaging.version.parse(pd.__version__).release SAMPLE_RAW_VALUES = dict( - dbdate=(datetime.date(2021, 2, 2), "2021-2-3", None), - dbtime=(datetime.time(1, 2, 2), "1:2:3.5", None), + dbdate=(datetime.date(2021, 2, 2), "2021-2-3", pd.NaT), + dbtime=(datetime.time(1, 2, 2), "1:2:3.5", pd.NaT), ) SAMPLE_VALUES = dict( dbdate=( @@ -90,7 +90,7 @@ def test_array_construction(dtype, factory_method): factory = getattr(factory, factory_method) if factory_method == "_from_sequence_of_strings": sample_raw_values = [ - str(v) if v is not None else v for v in sample_raw_values + str(v) if not pd.isna(v) else v for v in sample_raw_values ] a = factory(sample_raw_values) assert len(a) == 3 @@ -98,11 +98,11 @@ def test_array_construction(dtype, factory_method): assert a.shape == (3,) sample_values = SAMPLE_VALUES[dtype] 
assert a[0], a[1] == sample_values[:2] - assert a[2] is None + assert pd.isna(a[2]) and a[2] is pd.NaT # implementation details: assert a.nbytes == 24 - assert np.array_equal( + np.testing.assert_array_equal( a._ndarray == np.array(SAMPLE_DT_VALUES[dtype][:2] + ("NaT",), dtype="datetime64[us]"), [True, True, False], @@ -121,7 +121,7 @@ def test_time_series_construction(dtype): s = pd.Series(SAMPLE_RAW_VALUES[dtype], dtype=dtype) assert len(s) == 3 assert s[0], s[1] == sample_values[:2] - assert s[2] is None + assert s[2] is pd.NaT assert s.nbytes == 24 assert isinstance(s.array, _cls(dtype)) @@ -166,8 +166,8 @@ def test_timearray_comparisons( # Note that the right_obs comparisons work because # they're called on right_obs rather then left, because # TimeArrays only support comparisons with TimeArrays. - assert np.array_equal(comparisons[op](left, r), expected) - assert np.array_equal(complements[op](left, r), ~expected) + np.testing.assert_array_equal(comparisons[op](left, r), expected) + np.testing.assert_array_equal(complements[op](left, r), ~expected) # Bad shape for bad_shape in ([], [1, 2, 3]): @@ -186,10 +186,10 @@ def test_timearray_comparisons( [1], # a single-element array gets broadcast ): if op == "==": - assert np.array_equal( + np.testing.assert_array_equal( comparisons[op](left, np.array(bad_items)), np.array([False, False]) ) - assert np.array_equal( + np.testing.assert_array_equal( complements[op](left, np.array(bad_items)), np.array([True, True]) ) else: @@ -204,7 +204,7 @@ def test_timearray_comparisons( def test___getitem___arrayindex(dtype): cls = _cls(dtype) sample_values = SAMPLE_VALUES[dtype] - assert np.array_equal( + np.testing.assert_array_equal( cls(sample_values)[[1, 3]], cls([sample_values[1], sample_values[3]]), ) @@ -215,21 +215,23 @@ def test_timearray_slicing(dtype): b = a[:] assert b is not a assert b.__class__ == a.__class__ - assert np.array_equal(b, a) + np.testing.assert_array_equal(b._ndarray, a._ndarray) sample_values = 
SAMPLE_VALUES[dtype] cls = _cls(dtype) - assert np.array_equal(a[:1], cls._from_sequence(sample_values[:1])) + np.testing.assert_array_equal( + a[:1]._ndarray, cls._from_sequence(sample_values[:1])._ndarray + ) # Assignment works: a[:1] = cls._from_sequence([sample_values[2]]) - assert np.array_equal( + np.testing.assert_array_equal( a[:2], cls._from_sequence([sample_values[2], sample_values[1]]) ) # Series also work: s = pd.Series(SAMPLE_RAW_VALUES[dtype], dtype=dtype) - assert np.array_equal(s[:1].array, cls._from_sequence([sample_values[0]])) + np.testing.assert_array_equal(s[:1].array, cls._from_sequence([sample_values[0]])) @for_date_and_time @@ -238,9 +240,13 @@ def test_item_assignment(dtype): sample_values = SAMPLE_VALUES[dtype] cls = _cls(dtype) a[0] = sample_values[2] - assert np.array_equal(a, cls._from_sequence([sample_values[2], sample_values[1]])) + np.testing.assert_array_equal( + a, cls._from_sequence([sample_values[2], sample_values[1]]) + ) a[1] = None - assert np.array_equal(a, cls._from_sequence([sample_values[2], None])) + np.testing.assert_array_equal( + a._ndarray, cls._from_sequence([sample_values[2], None])._ndarray + ) @for_date_and_time @@ -249,9 +255,9 @@ def test_array_assignment(dtype): cls = _cls(dtype) sample_values = SAMPLE_VALUES[dtype] a[a.isna()] = sample_values[3] - assert np.array_equal(a, cls([sample_values[i] for i in (0, 1, 3)])) + np.testing.assert_array_equal(a, cls([sample_values[i] for i in (0, 1, 3)])) a[[0, 2]] = sample_values[2] - assert np.array_equal(a, cls([sample_values[i] for i in (2, 1, 2)])) + np.testing.assert_array_equal(a, cls([sample_values[i] for i in (2, 1, 2)])) @for_date_and_time @@ -270,7 +276,7 @@ def test_copy(dtype): b = a.copy() assert b is not a assert b._ndarray is not a._ndarray - assert np.array_equal(b, a) + np.testing.assert_array_equal(b, a) @for_date_and_time @@ -280,7 +286,7 @@ def test_from_ndarray_copy(dtype): a = cls._from_sequence(sample_values) b = cls(a._ndarray, copy=True) assert 
b._ndarray is not a._ndarray - assert np.array_equal(b, a) + np.testing.assert_array_equal(b, a) @for_date_and_time @@ -310,7 +316,7 @@ def test__validate_scalar_invalid(dtype): [ (False, None), (True, None), - (True, pd._libs.NaT if pd else None), + (True, pd.NaT if pd else None), (True, np.NaN if pd else None), (True, 42), ], @@ -326,7 +332,7 @@ def test_take(dtype, allow_fill, fill_value): else datetime.time(0, 42, 42, 424242) ) else: - expected_fill = None + expected_fill = pd.NaT b = a.take([1, -1, 3], allow_fill=True, fill_value=fill_value) expect = [sample_values[1], expected_fill, sample_values[3]] else: @@ -370,7 +376,7 @@ def test__concat_same_type_not_same_type(dtype): @for_date_and_time def test_dropna(dtype): - assert np.array_equal(_make_one(dtype).dropna(), _make_one(dtype)[:2]) + np.testing.assert_array_equal(_make_one(dtype).dropna(), _make_one(dtype)[:2]) @pytest.mark.parametrize( @@ -398,14 +404,18 @@ def test_fillna(dtype, value, meth, limit, expect): elif value is not None: value = sample_values[value] expect = cls([None if i is None else sample_values[i] for i in expect]) - assert np.array_equal(a.fillna(value, meth, limit), expect) + np.testing.assert_array_equal( + a.fillna(value, meth, limit)._ndarray, expect._ndarray + ) @for_date_and_time def test_unique(dtype): cls = _cls(dtype) sample_values = SAMPLE_VALUES[dtype] - assert np.array_equal(cls(sample_values * 3).unique(), cls(sample_values),) + np.testing.assert_array_equal( + cls(sample_values * 3).unique(), cls(sample_values), + ) @for_date_and_time @@ -421,7 +431,7 @@ def test_astype_copy(dtype): b = a.astype(a.dtype, copy=True) assert b is not a assert b.__class__ is a.__class__ - assert np.array_equal(b, a) + np.testing.assert_array_equal(b._ndarray, a._ndarray) @pytest.mark.parametrize( @@ -452,7 +462,7 @@ def test_asdatetime(dtype, same): b = a.astype(dt, copy=copy) assert b is not a._ndarray - assert np.array_equal(b[:2], a._ndarray[:2]) + np.testing.assert_array_equal(b[:2], 
a._ndarray[:2]) assert pd.isna(b[2]) and str(b[2]) == "NaT" @@ -482,7 +492,7 @@ def test_astimedelta(dtype): a = _cls("dbtime")([t, None]) b = a.astype(dtype) - np.array_equal(b[:1], expect) + np.testing.assert_array_equal(b[:1], expect) assert pd.isna(b[1]) and str(b[1]) == "NaT" @@ -523,7 +533,7 @@ def test_min_max_median(dtype): a = cls(data) assert a.min() == sample_values[0] assert a.max() == sample_values[-1] - if pandas_release >= (1, 2): + if pandas_release >= (1, 3): assert ( a.median() == datetime.time(1, 2, 4) if dtype == "dbtime" @@ -531,26 +541,26 @@ def test_min_max_median(dtype): ) empty = cls([]) - assert empty.min() is None - assert empty.max() is None - if pandas_release >= (1, 2): - assert empty.median() is None + assert empty.min() is pd.NaT + assert empty.max() is pd.NaT + if pandas_release >= (1, 3): + assert empty.median() is pd.NaT empty = cls([None]) - assert empty.min() is None - assert empty.max() is None - assert empty.min(skipna=False) is None - assert empty.max(skipna=False) is None - if pandas_release >= (1, 2): + assert empty.min() is pd.NaT + assert empty.max() is pd.NaT + assert empty.min(skipna=False) is pd.NaT + assert empty.max(skipna=False) is pd.NaT + if pandas_release >= (1, 3): with pytest.warns(RuntimeWarning, match="empty slice"): # It's weird that we get the warning here, and not # below. 
:/ - assert empty.median() is None - assert empty.median(skipna=False) is None + assert empty.median() is pd.NaT + assert empty.median(skipna=False) is pd.NaT a = _make_one(dtype) assert a.min() == sample_values[0] assert a.max() == sample_values[1] - if pandas_release >= (1, 2): + if pandas_release >= (1, 3): assert ( a.median() == datetime.time(1, 2, 2, 750000) if dtype == "dbtime" @@ -563,14 +573,14 @@ def test_date_add(): times = _cls("dbtime")(SAMPLE_VALUES["dbtime"]) expect = dates.astype("datetime64") + times.astype("timedelta64") - assert np.array_equal(dates + times, expect) - assert np.array_equal(times + dates, expect) + np.testing.assert_array_equal(dates + times, expect) + np.testing.assert_array_equal(times + dates, expect) do = pd.DateOffset(days=1) expect = dates.astype("object") + do - assert np.array_equal(dates + do, expect) + np.testing.assert_array_equal(dates + do, expect) if pandas_release >= (1, 1): - assert np.array_equal(do + dates, expect) + np.testing.assert_array_equal(do + dates, expect) with pytest.raises(TypeError): dates + times.astype("timedelta64") @@ -587,8 +597,8 @@ def test_date_add(): do = pd.Series([pd.DateOffset(days=i) for i in range(4)]) expect = dates.astype("object") + do - assert np.array_equal(dates + do, expect) - assert np.array_equal(do + dates, expect) + np.testing.assert_array_equal(dates + do, expect) + np.testing.assert_array_equal(do + dates, expect) def test_date_sub(): @@ -602,11 +612,11 @@ def test_date_sub(): ) ) expect = dates.astype("datetime64") - dates2.astype("datetime64") - assert np.array_equal(dates - dates2, expect) + np.testing.assert_array_equal(dates - dates2, expect) do = pd.DateOffset(days=1) expect = dates.astype("object") - do - assert np.array_equal(dates - do, expect) + np.testing.assert_array_equal(dates - do, expect) with pytest.raises(TypeError): dates - 42 @@ -620,4 +630,4 @@ def test_date_sub(): do = pd.Series([pd.DateOffset(days=i) for i in range(4)]) expect = dates.astype("object") 
- do - assert np.array_equal(dates - do, expect) + np.testing.assert_array_equal(dates - do, expect) diff --git a/packages/db-dtypes/tests/unit/test_time.py b/packages/db-dtypes/tests/unit/test_time.py index ba459499a1cb..8ecb9960b6e1 100644 --- a/packages/db-dtypes/tests/unit/test_time.py +++ b/packages/db-dtypes/tests/unit/test_time.py @@ -19,6 +19,7 @@ # To register the types. import db_dtypes # noqa +from db_dtypes import pandas_backports @pytest.mark.parametrize( @@ -82,3 +83,32 @@ def test_time_parsing(value, expected): def test_time_parsing_errors(value, error): with pytest.raises(ValueError, match=error): pandas.Series([value], dtype="dbtime") + + +@pytest.mark.skipif( + not hasattr(pandas_backports, "numpy_validate_median"), + reason="median not available with this version of pandas", +) +@pytest.mark.parametrize( + "values, expected", + [ + ( + ["00:00:00", "12:34:56.789101", "23:59:59.999999"], + datetime.time(12, 34, 56, 789101), + ), + ( + [ + None, + "06:30:00", + pandas.NA if hasattr(pandas, "NA") else None, + pandas.NaT, + float("nan"), + ], + datetime.time(6, 30), + ), + (["2:22:21.222222", "2:22:23.222222"], datetime.time(2, 22, 22, 222222)), + ], +) +def test_date_median(values, expected): + series = pandas.Series(values, dtype="dbtime") + assert series.median() == expected From b2bc49f94ac00002d84ffa49f9ff15170fc8a106 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 9 Feb 2022 08:50:55 -0500 Subject: [PATCH 041/210] chore: remove custom sync-repo-settings (#69) --- .../db-dtypes/.github/sync-repo-settings.yaml | 31 ------------------- 1 file changed, 31 deletions(-) delete mode 100644 packages/db-dtypes/.github/sync-repo-settings.yaml diff --git a/packages/db-dtypes/.github/sync-repo-settings.yaml b/packages/db-dtypes/.github/sync-repo-settings.yaml deleted file mode 100644 index ebe13aa89894..000000000000 --- a/packages/db-dtypes/.github/sync-repo-settings.yaml +++ /dev/null @@ -1,31 +0,0 @@ -# 
https://github.com/googleapis/repo-automation-bots/tree/main/packages/sync-repo-settings -# Rules for main branch protection -branchProtectionRules: -# Identifies the protection rule pattern. Name of the branch to be protected. -# Defaults to `main` -- pattern: main - requiresCodeOwnerReviews: true - requiresStrictStatusChecks: true - requiredStatusCheckContexts: - - 'cla/google' - - 'OwlBot Post Processor' - - 'docs' - - 'docfx' - - 'lint' - - 'unit (3.6)' - - 'unit (3.7)' - - 'unit (3.8)' - - 'unit (3.9)' - - 'unit (3.10)' - - 'cover' -permissionRules: - - team: actools-python - permission: admin - - team: actools - permission: admin - - team: yoshi-python - permission: push - - team: python-samples-owners - permission: push - - team: python-samples-reviewers - permission: push From 802b3ed196b6a38c4449bf660bb386cceb02f1d7 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 28 Feb 2022 20:50:35 +0100 Subject: [PATCH 042/210] chore(deps): update all dependencies (#68) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Update setup.py Co-authored-by: Owl Bot Co-authored-by: Tim Swast --- packages/db-dtypes/samples/snippets/requirements-test.txt | 2 +- packages/db-dtypes/setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/db-dtypes/samples/snippets/requirements-test.txt b/packages/db-dtypes/samples/snippets/requirements-test.txt index 927094516e65..c2845bffbe89 100644 --- a/packages/db-dtypes/samples/snippets/requirements-test.txt +++ b/packages/db-dtypes/samples/snippets/requirements-test.txt @@ -1 +1 @@ -pytest==6.2.5 +pytest==7.0.1 diff --git a/packages/db-dtypes/setup.py b/packages/db-dtypes/setup.py index 8def678a1ee4..7ad511900869 100644 --- a/packages/db-dtypes/setup.py +++ b/packages/db-dtypes/setup.py @@ 
-31,7 +31,7 @@ dependencies = [ "packaging >= 17.0", "pandas >= 0.24.2, < 2.0dev", - "pyarrow>=3.0.0, <7.0dev", + "pyarrow>=3.0.0, <8.0dev", "numpy >= 1.16.6, < 2.0dev", ] From d2bbe21dd5a83851f0cfd60f3442a430917c791e Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 1 Mar 2022 14:54:22 +0000 Subject: [PATCH 043/210] chore(deps): update actions/setup-python action to v3 (#71) Source-Link: https://github.com/googleapis/synthtool/commit/571ee2c3b26182429eddcf115122ee545d7d3787 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:660abdf857d3ab9aabcd967c163c70e657fcc5653595c709263af5f3fa23ef67 --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 3 +-- packages/db-dtypes/.github/workflows/docs.yml | 4 ++-- packages/db-dtypes/.github/workflows/lint.yml | 2 +- packages/db-dtypes/.github/workflows/unittest.yml | 4 ++-- 4 files changed, 6 insertions(+), 7 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index b668c04d5d65..d9a55fa405e8 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,5 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:ed1f9983d5a935a89fe8085e8bb97d94e41015252c5b6c9771257cf8624367e6 - + digest: sha256:660abdf857d3ab9aabcd967c163c70e657fcc5653595c709263af5f3fa23ef67 diff --git a/packages/db-dtypes/.github/workflows/docs.yml b/packages/db-dtypes/.github/workflows/docs.yml index f7b8344c4500..cca4e98bf236 100644 --- a/packages/db-dtypes/.github/workflows/docs.yml +++ b/packages/db-dtypes/.github/workflows/docs.yml @@ -10,7 +10,7 @@ jobs: - name: Checkout uses: actions/checkout@v2 - name: Setup Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: "3.10" - name: Install nox @@ -26,7 +26,7 @@ jobs: - name: Checkout uses: actions/checkout@v2 - name: Setup Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: "3.10" - name: Install nox diff --git a/packages/db-dtypes/.github/workflows/lint.yml b/packages/db-dtypes/.github/workflows/lint.yml index 1e8b05c3d7ff..f687324ef2eb 100644 --- a/packages/db-dtypes/.github/workflows/lint.yml +++ b/packages/db-dtypes/.github/workflows/lint.yml @@ -10,7 +10,7 @@ jobs: - name: Checkout uses: actions/checkout@v2 - name: Setup Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: "3.10" - name: Install nox diff --git a/packages/db-dtypes/.github/workflows/unittest.yml b/packages/db-dtypes/.github/workflows/unittest.yml index 074ee2504ca5..d3003e09e0c6 100644 --- a/packages/db-dtypes/.github/workflows/unittest.yml +++ b/packages/db-dtypes/.github/workflows/unittest.yml @@ -13,7 +13,7 @@ jobs: - name: Checkout uses: actions/checkout@v2 - name: Setup Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: ${{ matrix.python }} - name: Install nox @@ -39,7 +39,7 @@ jobs: - name: Checkout uses: actions/checkout@v2 - name: Setup Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: 
"3.10" - name: Install coverage From 3798f0f89556ab26e7172eeb8130aebdfbb7e720 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 2 Mar 2022 19:46:57 -0500 Subject: [PATCH 044/210] chore(deps): update actions/checkout action to v3 (#73) Source-Link: https://github.com/googleapis/synthtool/commit/ca879097772aeec2cbb971c3cea8ecc81522b68a Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:6162c384d685c5fe22521d3f37f6fc732bf99a085f6d47b677dbcae97fc21392 Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 2 +- packages/db-dtypes/.github/workflows/docs.yml | 4 ++-- packages/db-dtypes/.github/workflows/lint.yml | 2 +- packages/db-dtypes/.github/workflows/unittest.yml | 4 ++-- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index d9a55fa405e8..480226ac08a9 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:660abdf857d3ab9aabcd967c163c70e657fcc5653595c709263af5f3fa23ef67 + digest: sha256:6162c384d685c5fe22521d3f37f6fc732bf99a085f6d47b677dbcae97fc21392 diff --git a/packages/db-dtypes/.github/workflows/docs.yml b/packages/db-dtypes/.github/workflows/docs.yml index cca4e98bf236..b46d7305d8cf 100644 --- a/packages/db-dtypes/.github/workflows/docs.yml +++ b/packages/db-dtypes/.github/workflows/docs.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Setup Python uses: actions/setup-python@v3 with: @@ -24,7 +24,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Setup Python uses: actions/setup-python@v3 with: diff --git a/packages/db-dtypes/.github/workflows/lint.yml b/packages/db-dtypes/.github/workflows/lint.yml index f687324ef2eb..f512a4960beb 100644 --- a/packages/db-dtypes/.github/workflows/lint.yml +++ b/packages/db-dtypes/.github/workflows/lint.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Setup Python uses: actions/setup-python@v3 with: diff --git a/packages/db-dtypes/.github/workflows/unittest.yml b/packages/db-dtypes/.github/workflows/unittest.yml index d3003e09e0c6..e87fe5b7b79a 100644 --- a/packages/db-dtypes/.github/workflows/unittest.yml +++ b/packages/db-dtypes/.github/workflows/unittest.yml @@ -11,7 +11,7 @@ jobs: python: ['3.6', '3.7', '3.8', '3.9', '3.10'] steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Setup Python uses: actions/setup-python@v3 with: @@ -37,7 +37,7 @@ jobs: - unit steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Setup Python uses: actions/setup-python@v3 with: From 35491abafbdd2f86d3c2ece3b68712b8a6df1e72 Mon Sep 17 00:00:00 2001 
From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 4 Mar 2022 12:06:29 -0500 Subject: [PATCH 045/210] chore: Adding support for pytest-xdist and pytest-parallel (#76) Source-Link: https://github.com/googleapis/synthtool/commit/82f5cb283efffe96e1b6cd634738e0e7de2cd90a Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:5d8da01438ece4021d135433f2cf3227aa39ef0eaccc941d62aa35e6902832ae Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 2 +- .../db-dtypes/samples/snippets/noxfile.py | 78 +++++++++++-------- 2 files changed, 45 insertions(+), 35 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 480226ac08a9..7e08e05a380c 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:6162c384d685c5fe22521d3f37f6fc732bf99a085f6d47b677dbcae97fc21392 + digest: sha256:5d8da01438ece4021d135433f2cf3227aa39ef0eaccc941d62aa35e6902832ae diff --git a/packages/db-dtypes/samples/snippets/noxfile.py b/packages/db-dtypes/samples/snippets/noxfile.py index 20cdfc620138..85f5836dba3a 100644 --- a/packages/db-dtypes/samples/snippets/noxfile.py +++ b/packages/db-dtypes/samples/snippets/noxfile.py @@ -188,42 +188,52 @@ def _session_tests( # check for presence of tests test_list = glob.glob("*_test.py") + glob.glob("test_*.py") test_list.extend(glob.glob("tests")) + if len(test_list) == 0: print("No tests found, skipping directory.") - else: - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", 
"-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install( - "-r", "requirements-test.txt", "-c", "constraints-test.txt" - ) - else: - session.install("-r", "requirements-test.txt") - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. - # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) + return + + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + concurrent_args = [] + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + with open("requirements.txt") as rfile: + packages = rfile.read() + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") + else: + session.install("-r", "requirements-test.txt") + with open("requirements-test.txt") as rtfile: + packages += rtfile.read() + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + if "pytest-parallel" in packages: + concurrent_args.extend(["--workers", "auto", "--tests-per-worker", "auto"]) + elif "pytest-xdist" in packages: + concurrent_args.extend(["-n", "auto"]) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs + concurrent_args), + # Pytest will return 5 when no tests are 
collected. This can happen + # on travis where slow and flaky tests are excluded. + # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) @nox.session(python=ALL_VERSIONS) From 85ad4689c4172b4ce29b100663a846ed5a921b63 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 5 Mar 2022 00:36:25 +0000 Subject: [PATCH 046/210] chore(deps): update actions/download-artifact action to v3 (#77) Source-Link: https://github.com/googleapis/synthtool/commit/38e11ad1104dcc1e63b52691ddf2fe4015d06955 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:4e1991042fe54b991db9ca17c8fb386e61b22fe4d1472a568bf0fcac85dcf5d3 --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 2 +- packages/db-dtypes/.github/workflows/unittest.yml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 7e08e05a380c..44c78f7cc12d 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:5d8da01438ece4021d135433f2cf3227aa39ef0eaccc941d62aa35e6902832ae + digest: sha256:4e1991042fe54b991db9ca17c8fb386e61b22fe4d1472a568bf0fcac85dcf5d3 diff --git a/packages/db-dtypes/.github/workflows/unittest.yml b/packages/db-dtypes/.github/workflows/unittest.yml index e87fe5b7b79a..e5be6edbd54d 100644 --- a/packages/db-dtypes/.github/workflows/unittest.yml +++ b/packages/db-dtypes/.github/workflows/unittest.yml @@ -26,7 +26,7 @@ jobs: run: | nox -s unit-${{ matrix.python }} - name: Upload coverage results - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage-artifacts path: .coverage-${{ matrix.python }} @@ -47,7 +47,7 @@ jobs: python -m pip install --upgrade setuptools pip wheel python -m pip install coverage - name: Download coverage results - uses: actions/download-artifact@v2 + uses: actions/download-artifact@v3 with: name: coverage-artifacts path: .coverage-results/ From 6133012f92fafcd476257a7908a1db96f7e6c172 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Thu, 10 Mar 2022 13:41:10 -0600 Subject: [PATCH 047/210] fix: correct TypeError and comparison issues discovered in DateArray compliance tests (#79) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: address failing compliance tests in DateArray and TimeArray test: add a test session with prerelease versions of dependencies * fix min/max/median for 2D arrays * fixes except for null contains * actually use NaT as 'advertised' * fix!: use `pandas.NaT` for missing values in dbdate and dbtime dtypes This makes them consistent with other date/time dtypes, as well as internally consistent with the advertised `dtype.na_value`. 
BREAKING-CHANGE: dbdate and dbtime dtypes return NaT instead of None for missing values Release-As: 0.4.0 * more progress towards compliance * address errors in TestMethods * fix: correct dtype and interface compliance errors in DateArray * add compliance tests to github actions * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * split coverage * add nox session back * fix unit session * move compliance tests and remove unnecessary test * no need for coverage upload * fix coverage * restore coverage * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../.github/workflows/compliance.yml | 27 ++++++++++ packages/db-dtypes/db_dtypes/core.py | 16 ++++-- packages/db-dtypes/noxfile.py | 13 +++-- packages/db-dtypes/owlbot.py | 28 ++++++++++ .../db-dtypes/tests/compliance/conftest.py | 53 +++++++++++++++++++ .../tests/compliance/date/conftest.py | 47 ++++++++++++++++ .../compliance/date/test_date_compliance.py | 47 ++++++++++++++++ packages/db-dtypes/tests/unit/test_date.py | 16 +++++- 8 files changed, 239 insertions(+), 8 deletions(-) create mode 100644 packages/db-dtypes/.github/workflows/compliance.yml create mode 100644 packages/db-dtypes/tests/compliance/conftest.py create mode 100644 packages/db-dtypes/tests/compliance/date/conftest.py create mode 100644 packages/db-dtypes/tests/compliance/date/test_date_compliance.py diff --git a/packages/db-dtypes/.github/workflows/compliance.yml b/packages/db-dtypes/.github/workflows/compliance.yml new file mode 100644 index 000000000000..77e6b050f97b --- /dev/null +++ b/packages/db-dtypes/.github/workflows/compliance.yml @@ -0,0 +1,27 @@ +on: + pull_request: + branches: + - main +name: unittest +jobs: + compliance: + runs-on: ubuntu-latest + strategy: + matrix: + python: ['3.10'] + steps: + - name: Checkout + uses: actions/checkout@v3 + - name: 
Setup Python + uses: actions/setup-python@v3 + with: + python-version: ${{ matrix.python }} + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run compliance tests + env: + COVERAGE_FILE: .coverage-compliance-${{ matrix.python }} + run: | + nox -s compliance diff --git a/packages/db-dtypes/db_dtypes/core.py b/packages/db-dtypes/db_dtypes/core.py index a06c6d662a1e..b5b0b7a0377f 100644 --- a/packages/db-dtypes/db_dtypes/core.py +++ b/packages/db-dtypes/db_dtypes/core.py @@ -17,7 +17,7 @@ import numpy import pandas import pandas.api.extensions -from pandas.api.types import is_dtype_equal, is_list_like, pandas_dtype +from pandas.api.types import is_dtype_equal, is_list_like, is_scalar, pandas_dtype from db_dtypes import pandas_backports @@ -31,9 +31,14 @@ class BaseDatetimeDtype(pandas.api.extensions.ExtensionDtype): names = None @classmethod - def construct_from_string(cls, name): + def construct_from_string(cls, name: str): + if not isinstance(name, str): + raise TypeError( + f"'construct_from_string' expects a string, got {type(name)}" + ) + if name != cls.name: - raise TypeError() + raise TypeError(f"Cannot construct a '{cls.__name__}' from 'another_type'") return cls() @@ -74,6 +79,11 @@ def astype(self, dtype, copy=True): return super().astype(dtype, copy=copy) def _cmp_method(self, other, op): + """Compare array values, for use in OpsMixin.""" + + if is_scalar(other) and (pandas.isna(other) or type(other) == self.dtype.type): + other = type(self)([other]) + oshape = getattr(other, "shape", None) if oshape != self.shape and oshape != (1,) and self.shape != (1,): raise TypeError( diff --git a/packages/db-dtypes/noxfile.py b/packages/db-dtypes/noxfile.py index 5f48361032c0..54421d85da52 100644 --- a/packages/db-dtypes/noxfile.py +++ b/packages/db-dtypes/noxfile.py @@ -37,6 +37,7 @@ nox.options.sessions = [ "lint", "unit", + "compliance", "cover", "lint_setup_py", "blacken", @@ -77,7 +78,7 @@ 
def lint_setup_py(session): session.run("python", "setup.py", "check", "--restructuredtext", "--strict") -def default(session): +def default(session, tests_path): # Install all test dependencies, then install this package in-place. constraints_path = str( @@ -106,15 +107,21 @@ def default(session): "--cov-config=.coveragerc", "--cov-report=", "--cov-fail-under=0", - os.path.join("tests", "unit"), + tests_path, *session.posargs, ) +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS[-1]) +def compliance(session): + """Run the compliance test suite.""" + default(session, os.path.join("tests", "compliance")) + + @nox.session(python=UNIT_TEST_PYTHON_VERSIONS) def unit(session): """Run the unit test suite.""" - default(session) + default(session, os.path.join("tests", "unit")) @nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) diff --git a/packages/db-dtypes/owlbot.py b/packages/db-dtypes/owlbot.py index 30f3b3d7cf2e..6c596714f6c9 100644 --- a/packages/db-dtypes/owlbot.py +++ b/packages/db-dtypes/owlbot.py @@ -64,11 +64,39 @@ new_sessions = """ "lint", "unit", + "compliance", "cover", """ s.replace(["noxfile.py"], old_sessions, new_sessions) +# Add compliance tests. 
+s.replace( + ["noxfile.py"], r"def default\(session\):", "def default(session, tests_path):" +) +s.replace(["noxfile.py"], r'os.path.join\("tests", "unit"\),', "tests_path,") +s.replace( + ["noxfile.py"], + r''' +@nox.session\(python=UNIT_TEST_PYTHON_VERSIONS\) +def unit\(session\): + """Run the unit test suite.""" + default\(session\) +''', + ''' +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS[-1]) +def compliance(session): + """Run the compliance test suite.""" + default(session, os.path.join("tests", "compliance")) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +def unit(session): + """Run the unit test suite.""" + default(session, os.path.join("tests", "unit")) +''', +) + # ---------------------------------------------------------------------------- # Samples templates # ---------------------------------------------------------------------------- diff --git a/packages/db-dtypes/tests/compliance/conftest.py b/packages/db-dtypes/tests/compliance/conftest.py new file mode 100644 index 000000000000..bc76692dba04 --- /dev/null +++ b/packages/db-dtypes/tests/compliance/conftest.py @@ -0,0 +1,53 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pandas +import pytest + + +@pytest.fixture(params=["ffill", "bfill"]) +def fillna_method(request): + """ + Parametrized fixture giving method parameters 'ffill' and 'bfill' for + Series.fillna(method=) testing. 
+ + See: + https://github.com/pandas-dev/pandas/blob/main/pandas/tests/extension/conftest.py + """ + return request.param + + +@pytest.fixture +def na_value(): + return pandas.NaT + + +@pytest.fixture +def na_cmp(): + """ + Binary operator for comparing NA values. + + Should return a function of two arguments that returns + True if both arguments are (scalar) NA for your type. + + See: + https://github.com/pandas-dev/pandas/blob/main/pandas/tests/extension/conftest.py + and + https://github.com/pandas-dev/pandas/blob/main/pandas/tests/extension/test_datetime.py + """ + + def cmp(a, b): + return a is pandas.NaT and a is b + + return cmp diff --git a/packages/db-dtypes/tests/compliance/date/conftest.py b/packages/db-dtypes/tests/compliance/date/conftest.py new file mode 100644 index 000000000000..e25ccc9960a8 --- /dev/null +++ b/packages/db-dtypes/tests/compliance/date/conftest.py @@ -0,0 +1,47 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import datetime + +import numpy +import pytest + +from db_dtypes import DateArray, DateDtype + + +@pytest.fixture +def data(): + return DateArray( + numpy.arange( + datetime.datetime(1900, 1, 1), + datetime.datetime(2099, 12, 31), + datetime.timedelta(days=731), + dtype="datetime64[ns]", + ) + ) + + +@pytest.fixture +def data_missing(): + """Length-2 array with [NA, Valid] + + See: + https://github.com/pandas-dev/pandas/blob/main/pandas/tests/extension/conftest.py + """ + return DateArray([None, datetime.date(2022, 1, 27)]) + + +@pytest.fixture +def dtype(): + return DateDtype() diff --git a/packages/db-dtypes/tests/compliance/date/test_date_compliance.py b/packages/db-dtypes/tests/compliance/date/test_date_compliance.py new file mode 100644 index 000000000000..a805ecd7b9d8 --- /dev/null +++ b/packages/db-dtypes/tests/compliance/date/test_date_compliance.py @@ -0,0 +1,47 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" +Tests for extension interface compliance, inherited from pandas. 
+ +See: +https://github.com/pandas-dev/pandas/blob/main/pandas/tests/extension/decimal/test_decimal.py +and +https://github.com/pandas-dev/pandas/blob/main/pandas/tests/extension/test_period.py +""" + +from pandas.tests.extension import base + + +class TestDtype(base.BaseDtypeTests): + pass + + +class TestInterface(base.BaseInterfaceTests): + pass + + +class TestConstructors(base.BaseConstructorsTests): + pass + + +class TestReshaping(base.BaseReshapingTests): + pass + + +class TestGetitem(base.BaseGetitemTests): + pass + + +class TestMissing(base.BaseMissingTests): + pass diff --git a/packages/db-dtypes/tests/unit/test_date.py b/packages/db-dtypes/tests/unit/test_date.py index bf877eac31ed..bce2dc1d1e30 100644 --- a/packages/db-dtypes/tests/unit/test_date.py +++ b/packages/db-dtypes/tests/unit/test_date.py @@ -13,15 +13,27 @@ # limitations under the License. import datetime +import operator import pandas +import pandas.testing import pytest -# To register the types. -import db_dtypes # noqa +import db_dtypes from db_dtypes import pandas_backports +def test_construct_from_string_with_nonstring(): + with pytest.raises(TypeError): + db_dtypes.DateDtype.construct_from_string(object()) + + +def test__cmp_method_with_scalar(): + input_array = db_dtypes.DateArray([datetime.date(1900, 1, 1)]) + got = input_array._cmp_method(datetime.date(1900, 1, 1), operator.eq) + assert got[0] + + @pytest.mark.parametrize( "value, expected", [ From e3414ccedc6d68f725c344e9416878a9fd38f529 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sun, 13 Mar 2022 17:04:39 +0100 Subject: [PATCH 048/210] chore(deps): update dependency pytest to v7.1.0 (#80) --- packages/db-dtypes/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/db-dtypes/samples/snippets/requirements-test.txt b/packages/db-dtypes/samples/snippets/requirements-test.txt index c2845bffbe89..824a8a7a0ce6 100644 --- 
a/packages/db-dtypes/samples/snippets/requirements-test.txt +++ b/packages/db-dtypes/samples/snippets/requirements-test.txt @@ -1 +1 @@ -pytest==7.0.1 +pytest==7.1.0 From f4e09e06c8a733e09e588aa9ff482b486ddc5dc6 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 18 Mar 2022 03:13:02 +0100 Subject: [PATCH 049/210] chore(deps): update dependency pytest to v7.1.1 (#83) --- packages/db-dtypes/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/db-dtypes/samples/snippets/requirements-test.txt b/packages/db-dtypes/samples/snippets/requirements-test.txt index 824a8a7a0ce6..4f6bf643fc5e 100644 --- a/packages/db-dtypes/samples/snippets/requirements-test.txt +++ b/packages/db-dtypes/samples/snippets/requirements-test.txt @@ -1 +1 @@ -pytest==7.1.0 +pytest==7.1.1 From 497df99c8cb88fbaaa6c36659ada075e422fb280 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Fri, 18 Mar 2022 14:30:35 -0500 Subject: [PATCH 050/210] fix: address failing tests with pandas 1.5.0 (#82) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit test: add a test session with prerelease versions of dependencies Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly: - [ ] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/python-db-dtypes-pandas/issues/new/choose) before writing your code! 
That way we can discuss the change, evaluate designs, and agree on the general idea - [ ] Ensure the tests and linter pass - [ ] Code coverage does not decrease (if any source code was changed) - [ ] Appropriate docs were updated (if necessary) Fixes #81 🦕 --- .../.github/workflows/compliance.yml | 21 ++++ .../.github/workflows/unittest-prerelease.yml | 32 +++++++ packages/db-dtypes/db_dtypes/__init__.py | 26 +++-- packages/db-dtypes/db_dtypes/core.py | 6 ++ packages/db-dtypes/noxfile.py | 91 ++++++++++++++++++ packages/db-dtypes/owlbot.py | 96 ++++++++++++++++++- packages/db-dtypes/tests/unit/test_date.py | 24 +++++ packages/db-dtypes/tests/unit/test_time.py | 26 +++++ 8 files changed, 314 insertions(+), 8 deletions(-) create mode 100644 packages/db-dtypes/.github/workflows/unittest-prerelease.yml diff --git a/packages/db-dtypes/.github/workflows/compliance.yml b/packages/db-dtypes/.github/workflows/compliance.yml index 77e6b050f97b..eca8cc202617 100644 --- a/packages/db-dtypes/.github/workflows/compliance.yml +++ b/packages/db-dtypes/.github/workflows/compliance.yml @@ -25,3 +25,24 @@ jobs: COVERAGE_FILE: .coverage-compliance-${{ matrix.python }} run: | nox -s compliance + compliance-prerelease: + runs-on: ubuntu-latest + strategy: + matrix: + python: ['3.10'] + steps: + - name: Checkout + uses: actions/checkout@v3 + - name: Setup Python + uses: actions/setup-python@v3 + with: + python-version: ${{ matrix.python }} + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run compliance prerelease tests + env: + COVERAGE_FILE: .coverage-compliance-prerelease-${{ matrix.python }} + run: | + nox -s compliance_prerelease diff --git a/packages/db-dtypes/.github/workflows/unittest-prerelease.yml b/packages/db-dtypes/.github/workflows/unittest-prerelease.yml new file mode 100644 index 000000000000..a11568ace889 --- /dev/null +++ b/packages/db-dtypes/.github/workflows/unittest-prerelease.yml @@ -0,0 +1,32 
@@ +on: + pull_request: + branches: + - main +name: unittest-prerelease +jobs: + unit: + runs-on: ubuntu-latest + strategy: + matrix: + python: ['3.10'] + steps: + - name: Checkout + uses: actions/checkout@v3 + - name: Setup Python + uses: actions/setup-python@v3 + with: + python-version: ${{ matrix.python }} + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run unit tests + env: + COVERAGE_FILE: .coverage-prerelease-${{ matrix.python }} + run: | + nox -s unit_prerelease + - name: Upload coverage results + uses: actions/upload-artifact@v3 + with: + name: coverage-artifacts + path: .coverage-${{ matrix.python }} diff --git a/packages/db-dtypes/db_dtypes/__init__.py b/packages/db-dtypes/db_dtypes/__init__.py index a222e6dee1ba..d8e2ae543e75 100644 --- a/packages/db-dtypes/db_dtypes/__init__.py +++ b/packages/db-dtypes/db_dtypes/__init__.py @@ -34,6 +34,14 @@ time_dtype_name = "dbtime" _EPOCH = datetime.datetime(1970, 1, 1) _NPEPOCH = numpy.datetime64(_EPOCH) +_NP_DTYPE = "datetime64[ns]" + +# Numpy converts datetime64 scalars to datetime.datetime only if microsecond or +# smaller precision is used. +# +# TODO(https://github.com/googleapis/python-db-dtypes-pandas/issues/63): Keep +# nanosecond precision when boxing scalars. 
+_NP_BOX_DTYPE = "datetime64[us]" pandas_release = packaging.version.parse(pandas.__version__).release @@ -149,12 +157,14 @@ def _box_func(self, x): return pandas.NaT try: - return x.astype(" Date: Mon, 21 Mar 2022 15:19:02 -0500 Subject: [PATCH 051/210] fix: dbdate and dbtime support set item with null values (#85) feat: dbdate and dbtime support numpy.datetime64 values in array constructor --- packages/db-dtypes/db_dtypes/__init__.py | 10 ++- packages/db-dtypes/db_dtypes/core.py | 18 ++-- .../db-dtypes/db_dtypes/pandas_backports.py | 2 +- packages/db-dtypes/tests/unit/test_date.py | 82 +++++++++++++++---- packages/db-dtypes/tests/unit/test_time.py | 6 ++ 5 files changed, 91 insertions(+), 27 deletions(-) diff --git a/packages/db-dtypes/db_dtypes/__init__.py b/packages/db-dtypes/db_dtypes/__init__.py index d8e2ae543e75..7889dac672bf 100644 --- a/packages/db-dtypes/db_dtypes/__init__.py +++ b/packages/db-dtypes/db_dtypes/__init__.py @@ -106,6 +106,9 @@ def _datetime( r"(?:\.(?P\d*))?)?)?\s*$" ).match, ) -> Optional[numpy.datetime64]: + if isinstance(scalar, numpy.datetime64): + return scalar + # Convert pyarrow values to datetime.time. if isinstance(scalar, (pyarrow.Time32Scalar, pyarrow.Time64Scalar)): scalar = ( @@ -116,7 +119,7 @@ def _datetime( ) if pandas.isna(scalar): - return None + return numpy.datetime64("NaT") if isinstance(scalar, datetime.time): return pandas.Timestamp( year=1970, @@ -238,12 +241,15 @@ def _datetime( scalar, match_fn=re.compile(r"\s*(?P\d+)-(?P\d+)-(?P\d+)\s*$").match, ) -> Optional[numpy.datetime64]: + if isinstance(scalar, numpy.datetime64): + return scalar + # Convert pyarrow values to datetime.date. 
if isinstance(scalar, (pyarrow.Date32Scalar, pyarrow.Date64Scalar)): scalar = scalar.as_py() if pandas.isna(scalar): - return None + return numpy.datetime64("NaT") elif isinstance(scalar, datetime.date): return pandas.Timestamp( year=scalar.year, month=scalar.month, day=scalar.day diff --git a/packages/db-dtypes/db_dtypes/core.py b/packages/db-dtypes/db_dtypes/core.py index 14d76aa3404a..7879571cb645 100644 --- a/packages/db-dtypes/db_dtypes/core.py +++ b/packages/db-dtypes/db_dtypes/core.py @@ -100,14 +100,6 @@ def _cmp_method(self, other, op): return NotImplemented return op(self._ndarray, other._ndarray) - def __setitem__(self, key, value): - if is_list_like(value): - _datetime = self._datetime - value = [_datetime(v) for v in value] - elif not pandas.isna(value): - value = self._datetime(value) - return super().__setitem__(key, value) - def _from_factorized(self, unique, original): return self.__class__(unique) @@ -121,6 +113,16 @@ def _validate_scalar(self, value): """ return self._datetime(value) + def _validate_setitem_value(self, value): + """ + Convert a value for use in setting a value in the backing numpy array. 
+ """ + if is_list_like(value): + _datetime = self._datetime + return [_datetime(v) for v in value] + + return self._datetime(value) + def any( self, *, diff --git a/packages/db-dtypes/db_dtypes/pandas_backports.py b/packages/db-dtypes/db_dtypes/pandas_backports.py index f53adff3ddda..0e3998651150 100644 --- a/packages/db-dtypes/db_dtypes/pandas_backports.py +++ b/packages/db-dtypes/db_dtypes/pandas_backports.py @@ -126,7 +126,7 @@ def __getitem__(self, index): return self.__class__(value, self._dtype) def __setitem__(self, index, value): - self._ndarray[index] = value + self._ndarray[index] = self._validate_setitem_value(value) def __len__(self): return len(self._ndarray) diff --git a/packages/db-dtypes/tests/unit/test_date.py b/packages/db-dtypes/tests/unit/test_date.py index 79c97acd5088..fb41620ed217 100644 --- a/packages/db-dtypes/tests/unit/test_date.py +++ b/packages/db-dtypes/tests/unit/test_date.py @@ -24,6 +24,33 @@ from db_dtypes import pandas_backports +VALUE_PARSING_TEST_CASES = [ + # Min/Max values for pandas.Timestamp. + ("1677-09-22", datetime.date(1677, 9, 22)), + ("2262-04-11", datetime.date(2262, 4, 11)), + # Typical "zero" values. + ("1900-01-01", datetime.date(1900, 1, 1)), + ("1970-01-01", datetime.date(1970, 1, 1)), + # Assorted values. 
+ ("1993-10-31", datetime.date(1993, 10, 31)), + (datetime.date(1993, 10, 31), datetime.date(1993, 10, 31)), + ("2012-02-29", datetime.date(2012, 2, 29)), + (numpy.datetime64("2012-02-29"), datetime.date(2012, 2, 29)), + ("2021-12-17", datetime.date(2021, 12, 17)), + (pandas.Timestamp("2021-12-17"), datetime.date(2021, 12, 17)), + ("2038-01-19", datetime.date(2038, 1, 19)), +] + +NULL_VALUE_TEST_CASES = [ + None, + pandas.NaT, + float("nan"), +] + +if hasattr(pandas, "NA"): + NULL_VALUE_TEST_CASES.append(pandas.NA) + + def test_box_func(): input_array = db_dtypes.DateArray([]) input_datetime = datetime.datetime(2022, 3, 16) @@ -58,26 +85,49 @@ def test__cmp_method_with_scalar(): assert got[0] -@pytest.mark.parametrize( - "value, expected", - [ - # Min/Max values for pandas.Timestamp. - ("1677-09-22", datetime.date(1677, 9, 22)), - ("2262-04-11", datetime.date(2262, 4, 11)), - # Typical "zero" values. - ("1900-01-01", datetime.date(1900, 1, 1)), - ("1970-01-01", datetime.date(1970, 1, 1)), - # Assorted values. 
- ("1993-10-31", datetime.date(1993, 10, 31)), - ("2012-02-29", datetime.date(2012, 2, 29)), - ("2021-12-17", datetime.date(2021, 12, 17)), - ("2038-01-19", datetime.date(2038, 1, 19)), - ], -) +@pytest.mark.parametrize("value, expected", VALUE_PARSING_TEST_CASES) def test_date_parsing(value, expected): assert pandas.Series([value], dtype="dbdate")[0] == expected +@pytest.mark.parametrize("value", NULL_VALUE_TEST_CASES) +def test_date_parsing_null(value): + assert pandas.Series([value], dtype="dbdate")[0] is pandas.NaT + + +@pytest.mark.parametrize("value, expected", VALUE_PARSING_TEST_CASES) +def test_date_set_item(value, expected): + series = pandas.Series([None], dtype="dbdate") + series[0] = value + assert series[0] == expected + + +@pytest.mark.parametrize("value", NULL_VALUE_TEST_CASES) +def test_date_set_item_null(value): + series = pandas.Series(["1970-01-01"], dtype="dbdate") + series[0] = value + assert series[0] is pandas.NaT + + +def test_date_set_slice(): + series = pandas.Series([None, None, None], dtype="dbdate") + series[:] = [ + datetime.date(2022, 3, 21), + "2011-12-13", + numpy.datetime64("1998-09-04"), + ] + assert series[0] == datetime.date(2022, 3, 21) + assert series[1] == datetime.date(2011, 12, 13) + assert series[2] == datetime.date(1998, 9, 4) + + +def test_date_set_slice_null(): + series = pandas.Series(["1970-01-01"] * len(NULL_VALUE_TEST_CASES), dtype="dbdate") + series[:] = NULL_VALUE_TEST_CASES + for row_index in range(len(NULL_VALUE_TEST_CASES)): + assert series[row_index] is pandas.NaT + + @pytest.mark.parametrize( "value, error", [ diff --git a/packages/db-dtypes/tests/unit/test_time.py b/packages/db-dtypes/tests/unit/test_time.py index db533f569175..bdfc48b93f76 100644 --- a/packages/db-dtypes/tests/unit/test_time.py +++ b/packages/db-dtypes/tests/unit/test_time.py @@ -73,8 +73,14 @@ def test_box_func(): # Fractional seconds can cause rounding problems if cast to float. 
See: # https://github.com/googleapis/python-db-dtypes-pandas/issues/18 ("0:0:59.876543", datetime.time(0, 0, 59, 876543)), + ( + numpy.datetime64("1970-01-01 00:00:59.876543"), + datetime.time(0, 0, 59, 876543), + ), ("01:01:01.010101", datetime.time(1, 1, 1, 10101)), + (pandas.Timestamp("1970-01-01 01:01:01.010101"), datetime.time(1, 1, 1, 10101)), ("09:09:09.090909", datetime.time(9, 9, 9, 90909)), + (datetime.time(9, 9, 9, 90909), datetime.time(9, 9, 9, 90909)), ("11:11:11.111111", datetime.time(11, 11, 11, 111111)), ("19:16:23.987654", datetime.time(19, 16, 23, 987654)), # Microsecond precision From 887c695fa7dbd611be4206194fc70b87ba5d8ddb Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Thu, 24 Mar 2022 09:47:20 -0500 Subject: [PATCH 052/210] fix: address failing 2D array compliance tests in DateArray (#64) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: address failing compliance tests in DateArray and TimeArray test: add a test session with prerelease versions of dependencies * fix min/max/median for 2D arrays * fixes except for null contains * actually use NaT as 'advertised' * fix!: use `pandas.NaT` for missing values in dbdate and dbtime dtypes This makes them consistent with other date/time dtypes, as well as internally consistent with the advertised `dtype.na_value`. 
BREAKING-CHANGE: dbdate and dbtime dtypes return NaT instead of None for missing values Release-As: 0.4.0 * more progress towards compliance * address errors in TestMethods * move tests * add prerelease deps * fix: address failing tests with pandas 1.5.0 test: add a test session with prerelease versions of dependencies * fix owlbot config * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * document why microsecond precision is used * use correct units * add box_func tests * typo * add unit tests Co-authored-by: Owl Bot --- packages/db-dtypes/db_dtypes/core.py | 44 ++--- .../db-dtypes/db_dtypes/pandas_backports.py | 4 - .../date/test_date_compliance_1_5.py | 35 ++++ packages/db-dtypes/tests/unit/test_date.py | 150 ++++++++++++++++++ 4 files changed, 210 insertions(+), 23 deletions(-) create mode 100644 packages/db-dtypes/tests/compliance/date/test_date_compliance_1_5.py diff --git a/packages/db-dtypes/db_dtypes/core.py b/packages/db-dtypes/db_dtypes/core.py index 7879571cb645..5d5c053d2313 100644 --- a/packages/db-dtypes/db_dtypes/core.py +++ b/packages/db-dtypes/db_dtypes/core.py @@ -152,29 +152,35 @@ def min(self, *, axis: Optional[int] = None, skipna: bool = True, **kwargs): result = pandas_backports.nanmin( values=self._ndarray, axis=axis, mask=self.isna(), skipna=skipna ) - return self._box_func(result) + if axis is None or self.ndim == 1: + return self._box_func(result) + return self._from_backing_data(result) def max(self, *, axis: Optional[int] = None, skipna: bool = True, **kwargs): pandas_backports.numpy_validate_max((), kwargs) result = pandas_backports.nanmax( values=self._ndarray, axis=axis, mask=self.isna(), skipna=skipna ) - return self._box_func(result) - - if pandas_release >= (1, 2): - - def median( - self, - *, - axis: Optional[int] = 
None, - out=None, - overwrite_input: bool = False, - keepdims: bool = False, - skipna: bool = True, - ): - pandas_backports.numpy_validate_median( - (), - {"out": out, "overwrite_input": overwrite_input, "keepdims": keepdims}, - ) - result = pandas_backports.nanmedian(self._ndarray, axis=axis, skipna=skipna) + if axis is None or self.ndim == 1: + return self._box_func(result) + return self._from_backing_data(result) + + def median( + self, + *, + axis: Optional[int] = None, + out=None, + overwrite_input: bool = False, + keepdims: bool = False, + skipna: bool = True, + ): + if not hasattr(pandas_backports, "numpy_validate_median"): + raise NotImplementedError("Need pandas 1.3 or later to calculate median.") + + pandas_backports.numpy_validate_median( + (), {"out": out, "overwrite_input": overwrite_input, "keepdims": keepdims}, + ) + result = pandas_backports.nanmedian(self._ndarray, axis=axis, skipna=skipna) + if axis is None or self.ndim == 1: return self._box_func(result) + return self._from_backing_data(result) diff --git a/packages/db-dtypes/db_dtypes/pandas_backports.py b/packages/db-dtypes/db_dtypes/pandas_backports.py index 0e3998651150..0966e8319f07 100644 --- a/packages/db-dtypes/db_dtypes/pandas_backports.py +++ b/packages/db-dtypes/db_dtypes/pandas_backports.py @@ -106,12 +106,8 @@ def __ge__(self, other): # See: https://github.com/pandas-dev/pandas/pull/45544 @import_default("pandas.core.arrays._mixins", pandas_release < (1, 3)) class NDArrayBackedExtensionArray(pandas.core.arrays.base.ExtensionArray): - - ndim = 1 - def __init__(self, values, dtype): assert isinstance(values, numpy.ndarray) - assert values.ndim == 1 self._ndarray = values self._dtype = dtype diff --git a/packages/db-dtypes/tests/compliance/date/test_date_compliance_1_5.py b/packages/db-dtypes/tests/compliance/date/test_date_compliance_1_5.py new file mode 100644 index 000000000000..9c6da243f2df --- /dev/null +++ b/packages/db-dtypes/tests/compliance/date/test_date_compliance_1_5.py @@ 
-0,0 +1,35 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" +Tests for extension interface compliance, inherited from pandas. + +See: +https://github.com/pandas-dev/pandas/blob/main/pandas/tests/extension/decimal/test_decimal.py +and +https://github.com/pandas-dev/pandas/blob/main/pandas/tests/extension/test_period.py +""" + +from pandas.tests.extension import base +import pytest + +# NDArrayBacked2DTests suite added in https://github.com/pandas-dev/pandas/pull/44974 +pytest.importorskip("pandas", minversion="1.5.0dev") + + +class Test2DCompat(base.NDArrayBacked2DTests): + pass + + +class TestIndex(base.BaseIndexTests): + pass diff --git a/packages/db-dtypes/tests/unit/test_date.py b/packages/db-dtypes/tests/unit/test_date.py index fb41620ed217..b8f36f627ed3 100644 --- a/packages/db-dtypes/tests/unit/test_date.py +++ b/packages/db-dtypes/tests/unit/test_date.py @@ -16,6 +16,7 @@ import operator import numpy +import numpy.testing import pandas import pandas.testing import pytest @@ -154,6 +155,100 @@ def test_date_parsing_errors(value, error): pandas.Series([value], dtype="dbdate") +def test_date_max_2d(): + input_array = db_dtypes.DateArray( + numpy.array( + [ + [ + numpy.datetime64("1970-01-01"), + numpy.datetime64("1980-02-02"), + numpy.datetime64("1990-03-03"), + ], + [ + numpy.datetime64("1971-02-02"), + numpy.datetime64("1981-03-03"), + numpy.datetime64("1991-04-04"), + ], + [ + numpy.datetime64("1972-03-03"), + 
numpy.datetime64("1982-04-04"), + numpy.datetime64("1992-05-05"), + ], + ], + dtype="datetime64[ns]", + ) + ) + numpy.testing.assert_array_equal( + input_array.max(axis=0)._ndarray, + numpy.array( + [ + numpy.datetime64("1972-03-03"), + numpy.datetime64("1982-04-04"), + numpy.datetime64("1992-05-05"), + ], + dtype="datetime64[ns]", + ), + ) + numpy.testing.assert_array_equal( + input_array.max(axis=1)._ndarray, + numpy.array( + [ + numpy.datetime64("1990-03-03"), + numpy.datetime64("1991-04-04"), + numpy.datetime64("1992-05-05"), + ], + dtype="datetime64[ns]", + ), + ) + + +def test_date_min_2d(): + input_array = db_dtypes.DateArray( + numpy.array( + [ + [ + numpy.datetime64("1970-01-01"), + numpy.datetime64("1980-02-02"), + numpy.datetime64("1990-03-03"), + ], + [ + numpy.datetime64("1971-02-02"), + numpy.datetime64("1981-03-03"), + numpy.datetime64("1991-04-04"), + ], + [ + numpy.datetime64("1972-03-03"), + numpy.datetime64("1982-04-04"), + numpy.datetime64("1992-05-05"), + ], + ], + dtype="datetime64[ns]", + ) + ) + numpy.testing.assert_array_equal( + input_array.min(axis=0)._ndarray, + numpy.array( + [ + numpy.datetime64("1970-01-01"), + numpy.datetime64("1980-02-02"), + numpy.datetime64("1990-03-03"), + ], + dtype="datetime64[ns]", + ), + ) + numpy.testing.assert_array_equal( + input_array.min(axis=1)._ndarray, + numpy.array( + [ + numpy.datetime64("1970-01-01"), + numpy.datetime64("1971-02-02"), + numpy.datetime64("1972-03-03"), + ], + dtype="datetime64[ns]", + ), + ) + + @pytest.mark.skipif( not hasattr(pandas_backports, "numpy_validate_median"), reason="median not available with this version of pandas", @@ -178,3 +273,58 @@ def test_date_parsing_errors(value, error): def test_date_median(values, expected): series = pandas.Series(values, dtype="dbdate") assert series.median() == expected + + +@pytest.mark.skipif( + not hasattr(pandas_backports, "numpy_validate_median"), + reason="median not available with this version of pandas", +) +def 
test_date_median_2d(): + input_array = db_dtypes.DateArray( + numpy.array( + [ + [ + numpy.datetime64("1970-01-01"), + numpy.datetime64("1980-02-02"), + numpy.datetime64("1990-03-03"), + ], + [ + numpy.datetime64("1971-02-02"), + numpy.datetime64("1981-03-03"), + numpy.datetime64("1991-04-04"), + ], + [ + numpy.datetime64("1972-03-03"), + numpy.datetime64("1982-04-04"), + numpy.datetime64("1992-05-05"), + ], + ], + dtype="datetime64[ns]", + ) + ) + pandas.testing.assert_extension_array_equal( + input_array.median(axis=0), + db_dtypes.DateArray( + numpy.array( + [ + numpy.datetime64("1971-02-02"), + numpy.datetime64("1981-03-03"), + numpy.datetime64("1991-04-04"), + ], + dtype="datetime64[ns]", + ) + ), + ) + pandas.testing.assert_extension_array_equal( + input_array.median(axis=1), + db_dtypes.DateArray( + numpy.array( + [ + numpy.datetime64("1980-02-02"), + numpy.datetime64("1981-03-03"), + numpy.datetime64("1982-04-04"), + ], + dtype="datetime64[ns]", + ) + ), + ) From ac6f4789025253789833a44c4293e99f4ed3a61b Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Thu, 24 Mar 2022 15:08:16 -0500 Subject: [PATCH 053/210] fix: avoid TypeError when using sorted search (#84) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: address failing compliance tests in DateArray and TimeArray test: add a test session with prerelease versions of dependencies * fix min/max/median for 2D arrays * fixes except for null contains * actually use NaT as 'advertised' * fix!: use `pandas.NaT` for missing values in dbdate and dbtime dtypes This makes them consistent with other date/time dtypes, as well as internally consistent with the advertised `dtype.na_value`. 
BREAKING-CHANGE: dbdate and dbtime dtypes return NaT instead of None for missing values Release-As: 0.4.0 * more progress towards compliance * address errors in TestMethods * move tests * add prerelease deps * fix: address failing tests with pandas 1.5.0 test: add a test session with prerelease versions of dependencies * fix owlbot config * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * document why microsecond precision is used * use correct units * add box_func tests * typo * fix: avoid TypeError when using sorted search * add unit tests * fix: dbdate and dbtime support set item * add TestMethods * add unit test for search sorted Co-authored-by: Owl Bot --- packages/db-dtypes/db_dtypes/core.py | 8 ++ .../db-dtypes/tests/compliance/conftest.py | 60 +++++++++++++ .../tests/compliance/date/conftest.py | 85 +++++++++++++++++++ .../compliance/date/test_date_compliance.py | 29 +++++++ packages/db-dtypes/tests/unit/test_date.py | 27 ++++++ 5 files changed, 209 insertions(+) diff --git a/packages/db-dtypes/db_dtypes/core.py b/packages/db-dtypes/db_dtypes/core.py index 5d5c053d2313..f5779602e73a 100644 --- a/packages/db-dtypes/db_dtypes/core.py +++ b/packages/db-dtypes/db_dtypes/core.py @@ -113,6 +113,14 @@ def _validate_scalar(self, value): """ return self._datetime(value) + def _validate_searchsorted_value(self, value): + """ + Convert a value for use in searching for a value in the backing numpy array. + + TODO: With pandas 2.0, this may be unnecessary. https://github.com/pandas-dev/pandas/pull/45544#issuecomment-1052809232 + """ + return self._validate_setitem_value(value) + def _validate_setitem_value(self, value): """ Convert a value for use in setting a value in the backing numpy array. 
diff --git a/packages/db-dtypes/tests/compliance/conftest.py b/packages/db-dtypes/tests/compliance/conftest.py index bc76692dba04..54b767cc9a95 100644 --- a/packages/db-dtypes/tests/compliance/conftest.py +++ b/packages/db-dtypes/tests/compliance/conftest.py @@ -16,6 +16,28 @@ import pytest +@pytest.fixture(params=[True, False]) +def as_frame(request): + """ + Boolean fixture to support Series and Series.to_frame() comparison testing. + + See: + https://github.com/pandas-dev/pandas/blob/main/pandas/tests/extension/conftest.py + """ + return request.param + + +@pytest.fixture(params=[True, False]) +def as_series(request): + """ + Boolean fixture to support arr and Series(arr) comparison testing. + + See: + https://github.com/pandas-dev/pandas/blob/main/pandas/tests/extension/conftest.py + """ + return request.param + + @pytest.fixture(params=["ffill", "bfill"]) def fillna_method(request): """ @@ -28,6 +50,21 @@ def fillna_method(request): return request.param +@pytest.fixture +def invalid_scalar(data): + """ + A scalar that *cannot* be held by this ExtensionArray. + + The default should work for most subclasses, but is not guaranteed. + + If the array can hold any item (i.e. object dtype), then use pytest.skip. + + See: + https://github.com/pandas-dev/pandas/blob/main/pandas/tests/extension/conftest.py + """ + return object.__new__(object) + + @pytest.fixture def na_value(): return pandas.NaT @@ -51,3 +88,26 @@ def cmp(a, b): return a is pandas.NaT and a is b return cmp + + +@pytest.fixture(params=[None, lambda x: x]) +def sort_by_key(request): + """ + Simple fixture for testing keys in sorting methods. + Tests None (no key) and the identity key. + + See: https://github.com/pandas-dev/pandas/blob/main/pandas/conftest.py + """ + return request.param + + +@pytest.fixture(params=[True, False]) +def use_numpy(request): + """ + Boolean fixture to support comparison testing of ExtensionDtype array + and numpy array. 
+ + See: + https://github.com/pandas-dev/pandas/blob/main/pandas/tests/extension/conftest.py + """ + return request.param diff --git a/packages/db-dtypes/tests/compliance/date/conftest.py b/packages/db-dtypes/tests/compliance/date/conftest.py index e25ccc9960a8..6f0a81625b42 100644 --- a/packages/db-dtypes/tests/compliance/date/conftest.py +++ b/packages/db-dtypes/tests/compliance/date/conftest.py @@ -20,6 +20,15 @@ from db_dtypes import DateArray, DateDtype +@pytest.fixture(params=["data", "data_missing"]) +def all_data(request, data, data_missing): + """Parametrized fixture giving 'data' and 'data_missing'""" + if request.param == "data": + return data + elif request.param == "data_missing": + return data_missing + + @pytest.fixture def data(): return DateArray( @@ -32,6 +41,52 @@ def data(): ) +@pytest.fixture +def data_for_grouping(): + """ + Data for factorization, grouping, and unique tests. + + Expected to be like [B, B, NA, NA, A, A, B, C] + + Where A < B < C and NA is missing + + See: + https://github.com/pandas-dev/pandas/blob/main/pandas/tests/extension/conftest.py + """ + return DateArray( + [ + datetime.date(1980, 1, 27), + datetime.date(1980, 1, 27), + None, + None, + datetime.date(1969, 12, 30), + datetime.date(1969, 12, 30), + datetime.date(1980, 1, 27), + datetime.date(2022, 3, 18), + ] + ) + + +@pytest.fixture +def data_for_sorting(): + """ + Length-3 array with a known sort order. + + This should be three items [B, C, A] with + A < B < C + + See: + https://github.com/pandas-dev/pandas/blob/main/pandas/tests/extension/conftest.py + """ + return DateArray( + [ + datetime.date(1980, 1, 27), + datetime.date(2022, 3, 18), + datetime.date(1969, 12, 30), + ] + ) + + @pytest.fixture def data_missing(): """Length-2 array with [NA, Valid] @@ -42,6 +97,36 @@ def data_missing(): return DateArray([None, datetime.date(2022, 1, 27)]) +@pytest.fixture +def data_missing_for_sorting(): + """ + Length-3 array with a known sort order. 
+ + This should be three items [B, NA, A] with + A < B and NA missing. + + See: + https://github.com/pandas-dev/pandas/blob/main/pandas/tests/extension/conftest.py + """ + return DateArray([datetime.date(1980, 1, 27), None, datetime.date(1969, 12, 30)]) + + +@pytest.fixture +def data_repeated(data): + """ + Generate many datasets. + + See: + https://github.com/pandas-dev/pandas/blob/main/pandas/tests/extension/conftest.py + """ + + def gen(count): + for _ in range(count): + yield data + + return gen + + @pytest.fixture def dtype(): return DateDtype() diff --git a/packages/db-dtypes/tests/compliance/date/test_date_compliance.py b/packages/db-dtypes/tests/compliance/date/test_date_compliance.py index a805ecd7b9d8..13327a700c7b 100644 --- a/packages/db-dtypes/tests/compliance/date/test_date_compliance.py +++ b/packages/db-dtypes/tests/compliance/date/test_date_compliance.py @@ -20,7 +20,11 @@ https://github.com/pandas-dev/pandas/blob/main/pandas/tests/extension/test_period.py """ +import pandas from pandas.tests.extension import base +import pytest + +import db_dtypes class TestDtype(base.BaseDtypeTests): @@ -45,3 +49,28 @@ class TestGetitem(base.BaseGetitemTests): class TestMissing(base.BaseMissingTests): pass + + +# TODO(https://github.com/googleapis/python-db-dtypes-pandas/issues/78): Add +# compliance tests for reduction operations. + + +class TestMethods(base.BaseMethodsTests): + def test_combine_add(self): + pytest.skip("Cannot add dates.") + + @pytest.mark.parametrize("dropna", [True, False]) + def test_value_counts(self, all_data, dropna): + all_data = all_data[:10] + if dropna: + # Overridden from + # https://github.com/pandas-dev/pandas/blob/main/pandas/tests/extension/base/methods.py + # to avoid difference in dtypes. 
+ other = db_dtypes.DateArray(all_data[~all_data.isna()]) + else: + other = all_data + + result = pandas.Series(all_data).value_counts(dropna=dropna).sort_index() + expected = pandas.Series(other).value_counts(dropna=dropna).sort_index() + + self.assert_series_equal(result, expected) diff --git a/packages/db-dtypes/tests/unit/test_date.py b/packages/db-dtypes/tests/unit/test_date.py index b8f36f627ed3..bbe74cbdd448 100644 --- a/packages/db-dtypes/tests/unit/test_date.py +++ b/packages/db-dtypes/tests/unit/test_date.py @@ -328,3 +328,30 @@ def test_date_median_2d(): ) ), ) + + +@pytest.mark.parametrize( + ("search_term", "expected_index"), + ( + (datetime.date(1899, 12, 31), 0), + (datetime.date(1900, 1, 1), 0), + (datetime.date(1920, 2, 2), 1), + (datetime.date(1930, 3, 3), 1), + (datetime.date(1950, 5, 5), 2), + (datetime.date(1990, 9, 9), 3), + (datetime.date(2012, 12, 12), 3), + (datetime.date(2022, 3, 24), 4), + ), +) +def test_date_searchsorted(search_term, expected_index): + test_series = pandas.Series( + [ + datetime.date(1900, 1, 1), + datetime.date(1930, 3, 3), + datetime.date(1980, 8, 8), + datetime.date(2012, 12, 12), + ], + dtype="dbdate", + ) + got = test_series.searchsorted(search_term) + assert got == expected_index From 563cacc11cc80d0ccbc7a91f2d242384a8d2eedd Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Thu, 24 Mar 2022 15:20:19 -0500 Subject: [PATCH 054/210] fix: allow comparison with scalar values (#88) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: address failing compliance tests in DateArray and TimeArray test: add a test session with prerelease versions of dependencies * fix min/max/median for 2D arrays * fixes except for null contains * actually use NaT as 'advertised' * fix!: use `pandas.NaT` for missing values in dbdate and dbtime dtypes This makes them consistent with other date/time dtypes, as well as internally consistent with the advertised `dtype.na_value`. 
BREAKING-CHANGE: dbdate and dbtime dtypes return NaT instead of None for missing values Release-As: 0.4.0 * more progress towards compliance * address errors in TestMethods * move tests * add prerelease deps * fix: address failing tests with pandas 1.5.0 test: add a test session with prerelease versions of dependencies * fix owlbot config * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * document why microsecond precision is used * use correct units * add box_func tests * typo * fix: avoid TypeError when using sorted search * add unit tests * fix: dbdate and dbtime support set item * add TestMethods * fix: allow comparison with scalar values * correct behavior for comparison with different types and shapes * use same dtype in shape comparison tests Co-authored-by: Owl Bot --- packages/db-dtypes/db_dtypes/core.py | 6 +- .../db-dtypes/tests/compliance/conftest.py | 62 +++++++++++++++++++ .../compliance/date/test_date_compliance.py | 24 +++++++ packages/db-dtypes/tests/unit/test_dtypes.py | 10 +-- 4 files changed, 92 insertions(+), 10 deletions(-) diff --git a/packages/db-dtypes/db_dtypes/core.py b/packages/db-dtypes/db_dtypes/core.py index f5779602e73a..68123e188b7e 100644 --- a/packages/db-dtypes/db_dtypes/core.py +++ b/packages/db-dtypes/db_dtypes/core.py @@ -90,14 +90,14 @@ def _cmp_method(self, other, op): if is_scalar(other) and (pandas.isna(other) or type(other) == self.dtype.type): other = type(self)([other]) + if type(other) != type(self): + return NotImplemented + oshape = getattr(other, "shape", None) if oshape != self.shape and oshape != (1,) and self.shape != (1,): raise TypeError( "Can't compare arrays with different shapes", self.shape, oshape ) - - if type(other) != type(self): - return NotImplemented return op(self._ndarray, other._ndarray) 
def _from_factorized(self, unique, original): diff --git a/packages/db-dtypes/tests/compliance/conftest.py b/packages/db-dtypes/tests/compliance/conftest.py index 54b767cc9a95..b891ed6e7a27 100644 --- a/packages/db-dtypes/tests/compliance/conftest.py +++ b/packages/db-dtypes/tests/compliance/conftest.py @@ -12,10 +12,23 @@ # See the License for the specific language governing permissions and # limitations under the License. +import operator + import pandas import pytest +@pytest.fixture(params=[True, False]) +def as_array(request): + """ + Boolean fixture to support ExtensionDtype _from_sequence method testing. + + See: + https://github.com/pandas-dev/pandas/blob/main/pandas/tests/extension/conftest.py + """ + return request.param + + @pytest.fixture(params=[True, False]) def as_frame(request): """ @@ -38,6 +51,36 @@ def as_series(request): return request.param +@pytest.fixture(params=[True, False]) +def box_in_series(request): + """ + Whether to box the data in a Series + + See: + https://github.com/pandas-dev/pandas/blob/main/pandas/tests/extension/conftest.py + """ + return request.param + + +@pytest.fixture( + params=[ + operator.eq, + operator.ne, + operator.gt, + operator.ge, + operator.lt, + operator.le, + ] +) +def comparison_op(request): + """ + Fixture for operator module comparison functions. + + See: https://github.com/pandas-dev/pandas/blob/main/pandas/conftest.py + """ + return request.param + + @pytest.fixture(params=["ffill", "bfill"]) def fillna_method(request): """ @@ -50,6 +93,25 @@ def fillna_method(request): return request.param +@pytest.fixture( + params=[ + lambda x: 1, + lambda x: [1] * len(x), + lambda x: pandas.Series([1] * len(x)), + lambda x: x, + ], + ids=["scalar", "list", "series", "object"], +) +def groupby_apply_op(request): + """ + Functions to test groupby.apply(). 
+ + See: + https://github.com/pandas-dev/pandas/blob/main/pandas/tests/extension/conftest.py + """ + return request.param + + @pytest.fixture def invalid_scalar(data): """ diff --git a/packages/db-dtypes/tests/compliance/date/test_date_compliance.py b/packages/db-dtypes/tests/compliance/date/test_date_compliance.py index 13327a700c7b..62819862b7ab 100644 --- a/packages/db-dtypes/tests/compliance/date/test_date_compliance.py +++ b/packages/db-dtypes/tests/compliance/date/test_date_compliance.py @@ -74,3 +74,27 @@ def test_value_counts(self, all_data, dropna): expected = pandas.Series(other).value_counts(dropna=dropna).sort_index() self.assert_series_equal(result, expected) + + +class TestCasting(base.BaseCastingTests): + pass + + +class TestGroupby(base.BaseGroupbyTests): + pass + + +class TestSetitem(base.BaseSetitemTests): + pass + + +class TestPrinting(base.BasePrintingTests): + pass + + +# TODO(https://github.com/googleapis/python-db-dtypes-pandas/issues/78): Add +# compliance tests for arithmetic operations. 
+ + +class TestComparisonOps(base.BaseComparisonOpsTests): + pass diff --git a/packages/db-dtypes/tests/unit/test_dtypes.py b/packages/db-dtypes/tests/unit/test_dtypes.py index 66074d8eb936..dc1613be74ff 100644 --- a/packages/db-dtypes/tests/unit/test_dtypes.py +++ b/packages/db-dtypes/tests/unit/test_dtypes.py @@ -169,16 +169,12 @@ def test_timearray_comparisons( np.testing.assert_array_equal(comparisons[op](left, r), expected) np.testing.assert_array_equal(complements[op](left, r), ~expected) - # Bad shape - for bad_shape in ([], [1, 2, 3]): + # Bad shape, but same type + for bad_shape in ([], sample_values[:3]): with pytest.raises( TypeError, match="Can't compare arrays with different shapes" ): - comparisons[op](left, np.array(bad_shape)) - with pytest.raises( - TypeError, match="Can't compare arrays with different shapes" - ): - complements[op](left, np.array(bad_shape)) + comparisons[op](left, _cls(dtype)._from_sequence(bad_shape)) # Bad items for bad_items in ( From 5ef9fa15122f279113da257900be54f4f789434b Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Thu, 24 Mar 2022 15:51:52 -0500 Subject: [PATCH 055/210] test: add final dbdate compliance tests and sort (#89) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: address failing compliance tests in DateArray and TimeArray test: add a test session with prerelease versions of dependencies * fix min/max/median for 2D arrays * fixes except for null contains * actually use NaT as 'advertised' * fix!: use `pandas.NaT` for missing values in dbdate and dbtime dtypes This makes them consistent with other date/time dtypes, as well as internally consistent with the advertised `dtype.na_value`. 
BREAKING-CHANGE: dbdate and dbtime dtypes return NaT instead of None for missing values Release-As: 0.4.0 * more progress towards compliance * address errors in TestMethods * move tests * add prerelease deps * fix: address failing tests with pandas 1.5.0 test: add a test session with prerelease versions of dependencies * fix owlbot config * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * document why microsecond precision is used * use correct units * add box_func tests * typo * fix: avoid TypeError when using sorted search * add unit tests * fix: dbdate and dbtime support set item * add TestMethods * fix: allow comparison with scalar values * correct behavior for comparison with different types and shapes * use same dtype in shape comparison tests * test: add final dbdate compliance tests and sort * remove redundant index tests Co-authored-by: Owl Bot --- .../compliance/date/test_date_compliance.py | 43 +++++++++++-------- .../date/test_date_compliance_1_5.py | 4 -- 2 files changed, 25 insertions(+), 22 deletions(-) diff --git a/packages/db-dtypes/tests/compliance/date/test_date_compliance.py b/packages/db-dtypes/tests/compliance/date/test_date_compliance.py index 62819862b7ab..e19caf70491d 100644 --- a/packages/db-dtypes/tests/compliance/date/test_date_compliance.py +++ b/packages/db-dtypes/tests/compliance/date/test_date_compliance.py @@ -27,11 +27,18 @@ import db_dtypes -class TestDtype(base.BaseDtypeTests): +# TODO(https://github.com/googleapis/python-db-dtypes-pandas/issues/87): Add +# compliance tests for arithmetic operations. + +# TODO(https://github.com/googleapis/python-db-dtypes-pandas/issues/78): Add +# compliance tests for reduction operations. 
+ + +class TestComparisonOps(base.BaseComparisonOpsTests): pass -class TestInterface(base.BaseInterfaceTests): +class TestCasting(base.BaseCastingTests): pass @@ -39,7 +46,7 @@ class TestConstructors(base.BaseConstructorsTests): pass -class TestReshaping(base.BaseReshapingTests): +class TestDtype(base.BaseDtypeTests): pass @@ -47,12 +54,20 @@ class TestGetitem(base.BaseGetitemTests): pass -class TestMissing(base.BaseMissingTests): +class TestGroupby(base.BaseGroupbyTests): pass -# TODO(https://github.com/googleapis/python-db-dtypes-pandas/issues/78): Add -# compliance tests for reduction operations. +class TestIndex(base.BaseIndexTests): + pass + + +class TestInterface(base.BaseInterfaceTests): + pass + + +class TestMissing(base.BaseMissingTests): + pass class TestMethods(base.BaseMethodsTests): @@ -76,15 +91,7 @@ def test_value_counts(self, all_data, dropna): self.assert_series_equal(result, expected) -class TestCasting(base.BaseCastingTests): - pass - - -class TestGroupby(base.BaseGroupbyTests): - pass - - -class TestSetitem(base.BaseSetitemTests): +class TestParsing(base.BaseParsingTests): pass @@ -92,9 +99,9 @@ class TestPrinting(base.BasePrintingTests): pass -# TODO(https://github.com/googleapis/python-db-dtypes-pandas/issues/78): Add -# compliance tests for arithmetic operations. 
+class TestReshaping(base.BaseReshapingTests): + pass -class TestComparisonOps(base.BaseComparisonOpsTests): +class TestSetitem(base.BaseSetitemTests): pass diff --git a/packages/db-dtypes/tests/compliance/date/test_date_compliance_1_5.py b/packages/db-dtypes/tests/compliance/date/test_date_compliance_1_5.py index 9c6da243f2df..e8f2c93fce87 100644 --- a/packages/db-dtypes/tests/compliance/date/test_date_compliance_1_5.py +++ b/packages/db-dtypes/tests/compliance/date/test_date_compliance_1_5.py @@ -29,7 +29,3 @@ class Test2DCompat(base.NDArrayBacked2DTests): pass - - -class TestIndex(base.BaseIndexTests): - pass From 3e28414774fa58460731df79e159e4810b5461a9 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Thu, 24 Mar 2022 15:59:03 -0500 Subject: [PATCH 056/210] test: add dbtime compliance tests (#90) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: address failing compliance tests in DateArray and TimeArray test: add a test session with prerelease versions of dependencies * fix min/max/median for 2D arrays * fixes except for null contains * actually use NaT as 'advertised' * fix!: use `pandas.NaT` for missing values in dbdate and dbtime dtypes This makes them consistent with other date/time dtypes, as well as internally consistent with the advertised `dtype.na_value`. 
BREAKING-CHANGE: dbdate and dbtime dtypes return NaT instead of None for missing values Release-As: 0.4.0 * more progress towards compliance * address errors in TestMethods * move tests * add prerelease deps * fix: address failing tests with pandas 1.5.0 test: add a test session with prerelease versions of dependencies * fix owlbot config * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * document why microsecond precision is used * use correct units * add box_func tests * typo * fix: avoid TypeError when using sorted search * add unit tests * fix: dbdate and dbtime support set item * add TestMethods * fix: allow comparison with scalar values * correct behavior for comparison with different types and shapes * use same dtype in shape comparison tests * test: add final dbdate compliance tests and sort * test: add dbtime compliance tests Co-authored-by: Owl Bot --- .../tests/compliance/time/conftest.py | 134 ++++++++++++++++++ .../compliance/time/test_time_compliance.py | 107 ++++++++++++++ .../time/test_time_compliance_1_5.py | 31 ++++ 3 files changed, 272 insertions(+) create mode 100644 packages/db-dtypes/tests/compliance/time/conftest.py create mode 100644 packages/db-dtypes/tests/compliance/time/test_time_compliance.py create mode 100644 packages/db-dtypes/tests/compliance/time/test_time_compliance_1_5.py diff --git a/packages/db-dtypes/tests/compliance/time/conftest.py b/packages/db-dtypes/tests/compliance/time/conftest.py new file mode 100644 index 000000000000..760a06804240 --- /dev/null +++ b/packages/db-dtypes/tests/compliance/time/conftest.py @@ -0,0 +1,134 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import datetime + +import numpy +import pytest + +from db_dtypes import TimeArray, TimeDtype + + +@pytest.fixture(params=["data", "data_missing"]) +def all_data(request, data, data_missing): + """Parametrized fixture giving 'data' and 'data_missing'""" + if request.param == "data": + return data + elif request.param == "data_missing": + return data_missing + + +@pytest.fixture +def data(): + return TimeArray( + numpy.arange( + datetime.datetime(1970, 1, 1), + datetime.datetime(1970, 1, 2), + datetime.timedelta(microseconds=864_123_456), + dtype="datetime64[ns]", + ) + ) + + +@pytest.fixture +def data_for_grouping(): + """ + Data for factorization, grouping, and unique tests. + + Expected to be like [B, B, NA, NA, A, A, B, C] + + Where A < B < C and NA is missing + + See: + https://github.com/pandas-dev/pandas/blob/main/pandas/tests/extension/conftest.py + """ + return TimeArray( + [ + datetime.time(11, 45, 29, 987_654), + datetime.time(11, 45, 29, 987_654), + None, + None, + datetime.time(0, 1, 2, 345_678), + datetime.time(0, 1, 2, 345_678), + datetime.time(11, 45, 29, 987_654), + datetime.time(23, 59, 59, 999_999), + ] + ) + + +@pytest.fixture +def data_for_sorting(): + """ + Length-3 array with a known sort order. 
+ + This should be three items [B, C, A] with + A < B < C + + See: + https://github.com/pandas-dev/pandas/blob/main/pandas/tests/extension/conftest.py + """ + return TimeArray( + [ + datetime.time(11, 45, 29, 987_654), + datetime.time(23, 59, 59, 999_999), + datetime.time(0, 1, 2, 345_678), + ] + ) + + +@pytest.fixture +def data_missing(): + """Length-2 array with [NA, Valid] + + See: + https://github.com/pandas-dev/pandas/blob/main/pandas/tests/extension/conftest.py + """ + return TimeArray([None, datetime.time(13, 7, 42, 123_456)]) + + +@pytest.fixture +def data_missing_for_sorting(): + """ + Length-3 array with a known sort order. + + This should be three items [B, NA, A] with + A < B and NA missing. + + See: + https://github.com/pandas-dev/pandas/blob/main/pandas/tests/extension/conftest.py + """ + return TimeArray( + [datetime.time(13, 7, 42, 123_456), None, datetime.time(1, 2, 3, 456_789)] + ) + + +@pytest.fixture +def data_repeated(data): + """ + Generate many datasets. + + See: + https://github.com/pandas-dev/pandas/blob/main/pandas/tests/extension/conftest.py + """ + + def gen(count): + for _ in range(count): + yield data + + return gen + + +@pytest.fixture +def dtype(): + return TimeDtype() diff --git a/packages/db-dtypes/tests/compliance/time/test_time_compliance.py b/packages/db-dtypes/tests/compliance/time/test_time_compliance.py new file mode 100644 index 000000000000..ab1e050a03a4 --- /dev/null +++ b/packages/db-dtypes/tests/compliance/time/test_time_compliance.py @@ -0,0 +1,107 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +""" +Tests for extension interface compliance, inherited from pandas. + +See: +https://github.com/pandas-dev/pandas/blob/main/pandas/tests/extension/decimal/test_decimal.py +and +https://github.com/pandas-dev/pandas/blob/main/pandas/tests/extension/test_period.py +""" + +import pandas +from pandas.tests.extension import base +import pytest + +import db_dtypes + + +# TODO(https://github.com/googleapis/python-db-dtypes-pandas/issues/87): Add +# compliance tests for arithmetic operations. + +# TODO(https://github.com/googleapis/python-db-dtypes-pandas/issues/78): Add +# compliance tests for reduction operations. + + +class TestComparisonOps(base.BaseComparisonOpsTests): + pass + + +class TestCasting(base.BaseCastingTests): + pass + + +class TestConstructors(base.BaseConstructorsTests): + pass + + +class TestDtype(base.BaseDtypeTests): + pass + + +class TestGetitem(base.BaseGetitemTests): + pass + + +class TestGroupby(base.BaseGroupbyTests): + pass + + +class TestIndex(base.BaseIndexTests): + pass + + +class TestInterface(base.BaseInterfaceTests): + pass + + +class TestMissing(base.BaseMissingTests): + pass + + +class TestMethods(base.BaseMethodsTests): + def test_combine_add(self): + pytest.skip("Cannot add dates.") + + @pytest.mark.parametrize("dropna", [True, False]) + def test_value_counts(self, all_data, dropna): + all_data = all_data[:10] + if dropna: + # Overridden from + # https://github.com/pandas-dev/pandas/blob/main/pandas/tests/extension/base/methods.py + # to avoid difference in dtypes. 
+ other = db_dtypes.TimeArray(all_data[~all_data.isna()]) + else: + other = all_data + + result = pandas.Series(all_data).value_counts(dropna=dropna).sort_index() + expected = pandas.Series(other).value_counts(dropna=dropna).sort_index() + + self.assert_series_equal(result, expected) + + +class TestParsing(base.BaseParsingTests): + pass + + +class TestPrinting(base.BasePrintingTests): + pass + + +class TestReshaping(base.BaseReshapingTests): + pass + + +class TestSetitem(base.BaseSetitemTests): + pass diff --git a/packages/db-dtypes/tests/compliance/time/test_time_compliance_1_5.py b/packages/db-dtypes/tests/compliance/time/test_time_compliance_1_5.py new file mode 100644 index 000000000000..e8f2c93fce87 --- /dev/null +++ b/packages/db-dtypes/tests/compliance/time/test_time_compliance_1_5.py @@ -0,0 +1,31 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" +Tests for extension interface compliance, inherited from pandas. 
+ +See: +https://github.com/pandas-dev/pandas/blob/main/pandas/tests/extension/decimal/test_decimal.py +and +https://github.com/pandas-dev/pandas/blob/main/pandas/tests/extension/test_period.py +""" + +from pandas.tests.extension import base +import pytest + +# NDArrayBacked2DTests suite added in https://github.com/pandas-dev/pandas/pull/44974 +pytest.importorskip("pandas", minversion="1.5.0dev") + + +class Test2DCompat(base.NDArrayBacked2DTests): + pass From b92361f90eab8395bead78d8db78c4669ad0adb3 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 24 Mar 2022 21:06:15 +0000 Subject: [PATCH 057/210] chore(main): release 0.4.0 (#62) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit :robot: I have created a release *beep* *boop* --- ## [0.4.0](https://github.com/googleapis/python-db-dtypes-pandas/compare/v0.3.1...v0.4.0) (2022-03-24) ### ⚠ BREAKING CHANGES * * fix: address failing compliance tests in DateArray and TimeArray * * fix: address failing compliance tests in DateArray and TimeArray * * fix: address failing compliance tests in DateArray and TimeArray * * fix: address failing compliance tests in DateArray and TimeArray * * fix: address failing compliance tests in DateArray and TimeArray * * fix: address failing compliance tests in DateArray and TimeArray * dbdate and dbtime dtypes return NaT instead of None for missing values ### Features * dbdate and dbtime support numpy.datetime64 values in array constructor ([1db1357](https://github.com/googleapis/python-db-dtypes-pandas/commit/1db1357186b234a28b2ced10174bbd06e2f0ab73)) ### Bug Fixes * address failing 2D array compliance tests in DateArray ([#64](https://github.com/googleapis/python-db-dtypes-pandas/issues/64)) ([b771e05](https://github.com/googleapis/python-db-dtypes-pandas/commit/b771e050acd2bdbf469a97f7477036c159b500f8)) * address failing tests with pandas 1.5.0 
([#82](https://github.com/googleapis/python-db-dtypes-pandas/issues/82)) ([38ac28d](https://github.com/googleapis/python-db-dtypes-pandas/commit/38ac28d8b16f9b86b5029c85e45e9f2e034159b7)) * allow comparison with scalar values ([#88](https://github.com/googleapis/python-db-dtypes-pandas/issues/88)) ([7495698](https://github.com/googleapis/python-db-dtypes-pandas/commit/7495698b3be3b7e8055ae450e24cd0e366b1b72a)) * avoid TypeError when using sorted search ([#84](https://github.com/googleapis/python-db-dtypes-pandas/issues/84)) ([42bc2d9](https://github.com/googleapis/python-db-dtypes-pandas/commit/42bc2d90174d152dfed782acf77016da55dbdaca)) * correct TypeError and comparison issues discovered in DateArray compliance tests ([#79](https://github.com/googleapis/python-db-dtypes-pandas/issues/79)) ([1e979cf](https://github.com/googleapis/python-db-dtypes-pandas/commit/1e979cf360eb586e77b415f7b710a8a41c22e981)) * dbdate and dbtime support set item with null values ([#85](https://github.com/googleapis/python-db-dtypes-pandas/issues/85)) ([1db1357](https://github.com/googleapis/python-db-dtypes-pandas/commit/1db1357186b234a28b2ced10174bbd06e2f0ab73)) * use `pandas.NaT` for missing values in dbdate and dbtime dtypes ([#67](https://github.com/googleapis/python-db-dtypes-pandas/issues/67)) ([f903c2c](https://github.com/googleapis/python-db-dtypes-pandas/commit/f903c2c68da1629241cf3bf37e1226babae669f4)) * use public pandas APIs where possible ([#60](https://github.com/googleapis/python-db-dtypes-pandas/issues/60)) ([e9d41d1](https://github.com/googleapis/python-db-dtypes-pandas/commit/e9d41d17b5d6a7d83c46e2497feb8e314545adcb)) ### Tests * add dbtime compliance tests ([#90](https://github.com/googleapis/python-db-dtypes-pandas/issues/90)) ([f14fb2b](https://github.com/googleapis/python-db-dtypes-pandas/commit/f14fb2bf78d8427b9546db4cdad1d893c1b1e5e1)) * add final dbdate compliance tests and sort ([#89](https://github.com/googleapis/python-db-dtypes-pandas/issues/89)) 
([efe7e6d](https://github.com/googleapis/python-db-dtypes-pandas/commit/efe7e6d8953ebf8d2b4d9468c7c92638ea2ec9f9)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). --- packages/db-dtypes/CHANGELOG.md | 29 +++++++++++++++++++++++++ packages/db-dtypes/db_dtypes/version.py | 2 +- 2 files changed, 30 insertions(+), 1 deletion(-) diff --git a/packages/db-dtypes/CHANGELOG.md b/packages/db-dtypes/CHANGELOG.md index 0298e79d02c8..b46bc6bbc1af 100644 --- a/packages/db-dtypes/CHANGELOG.md +++ b/packages/db-dtypes/CHANGELOG.md @@ -1,5 +1,34 @@ # Changelog +## [0.4.0](https://github.com/googleapis/python-db-dtypes-pandas/compare/v0.3.1...v0.4.0) (2022-03-24) + + +### ⚠ BREAKING CHANGES + +* dbdate and dbtime dtypes return NaT instead of None for missing values + +### Features + +* dbdate and dbtime support numpy.datetime64 values in array constructor ([1db1357](https://github.com/googleapis/python-db-dtypes-pandas/commit/1db1357186b234a28b2ced10174bbd06e2f0ab73)) + + +### Bug Fixes + +* address failing 2D array compliance tests in DateArray ([#64](https://github.com/googleapis/python-db-dtypes-pandas/issues/64)) ([b771e05](https://github.com/googleapis/python-db-dtypes-pandas/commit/b771e050acd2bdbf469a97f7477036c159b500f8)) +* address failing tests with pandas 1.5.0 ([#82](https://github.com/googleapis/python-db-dtypes-pandas/issues/82)) ([38ac28d](https://github.com/googleapis/python-db-dtypes-pandas/commit/38ac28d8b16f9b86b5029c85e45e9f2e034159b7)) +* allow comparison with scalar values ([#88](https://github.com/googleapis/python-db-dtypes-pandas/issues/88)) ([7495698](https://github.com/googleapis/python-db-dtypes-pandas/commit/7495698b3be3b7e8055ae450e24cd0e366b1b72a)) +* avoid TypeError when using sorted search ([#84](https://github.com/googleapis/python-db-dtypes-pandas/issues/84)) 
([42bc2d9](https://github.com/googleapis/python-db-dtypes-pandas/commit/42bc2d90174d152dfed782acf77016da55dbdaca)) +* correct TypeError and comparison issues discovered in DateArray compliance tests ([#79](https://github.com/googleapis/python-db-dtypes-pandas/issues/79)) ([1e979cf](https://github.com/googleapis/python-db-dtypes-pandas/commit/1e979cf360eb586e77b415f7b710a8a41c22e981)) +* dbdate and dbtime support set item with null values ([#85](https://github.com/googleapis/python-db-dtypes-pandas/issues/85)) ([1db1357](https://github.com/googleapis/python-db-dtypes-pandas/commit/1db1357186b234a28b2ced10174bbd06e2f0ab73)) +* use `pandas.NaT` for missing values in dbdate and dbtime dtypes ([#67](https://github.com/googleapis/python-db-dtypes-pandas/issues/67)) ([f903c2c](https://github.com/googleapis/python-db-dtypes-pandas/commit/f903c2c68da1629241cf3bf37e1226babae669f4)) +* use public pandas APIs where possible ([#60](https://github.com/googleapis/python-db-dtypes-pandas/issues/60)) ([e9d41d1](https://github.com/googleapis/python-db-dtypes-pandas/commit/e9d41d17b5d6a7d83c46e2497feb8e314545adcb)) + + +### Tests + +* add dbtime compliance tests ([#90](https://github.com/googleapis/python-db-dtypes-pandas/issues/90)) ([f14fb2b](https://github.com/googleapis/python-db-dtypes-pandas/commit/f14fb2bf78d8427b9546db4cdad1d893c1b1e5e1)) +* add final dbdate compliance tests and sort ([#89](https://github.com/googleapis/python-db-dtypes-pandas/issues/89)) ([efe7e6d](https://github.com/googleapis/python-db-dtypes-pandas/commit/efe7e6d8953ebf8d2b4d9468c7c92638ea2ec9f9)) + ### [0.3.1](https://www.github.com/googleapis/python-db-dtypes-pandas/compare/v0.3.0...v0.3.1) (2021-12-04) diff --git a/packages/db-dtypes/db_dtypes/version.py b/packages/db-dtypes/db_dtypes/version.py index b118f0850400..c0c2669d20c7 100644 --- a/packages/db-dtypes/db_dtypes/version.py +++ b/packages/db-dtypes/db_dtypes/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing 
permissions and # limitations under the License. -__version__ = "0.3.1" +__version__ = "0.4.0" From 03426e18d87bedd89641eeaceb6e82f98b6e6317 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Fri, 25 Mar 2022 11:03:31 -0500 Subject: [PATCH 058/210] feat: label package as generally available (#92) * feat: label package as generally available Release-As: 1.0.0 Co-authored-by: Dan Lee <71398022+dandhlee@users.noreply.github.com> --- packages/db-dtypes/.repo-metadata.json | 2 +- packages/db-dtypes/README.rst | 6 +++--- packages/db-dtypes/setup.py | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/db-dtypes/.repo-metadata.json b/packages/db-dtypes/.repo-metadata.json index 176e5d9de749..027751fe0b26 100644 --- a/packages/db-dtypes/.repo-metadata.json +++ b/packages/db-dtypes/.repo-metadata.json @@ -2,7 +2,7 @@ "name": "db-dtypes", "name_pretty": "Pandas Data Types for SQL systems (BigQuery, Spanner)", "client_documentation": "https://googleapis.dev/python/db-dtypes/latest/index.html", - "release_level": "preview", + "release_level": "stable", "language": "python", "library_type": "INTEGRATION", "repo": "googleapis/python-db-dtypes-pandas", diff --git a/packages/db-dtypes/README.rst b/packages/db-dtypes/README.rst index a14a61db5e76..8ea047c503ea 100644 --- a/packages/db-dtypes/README.rst +++ b/packages/db-dtypes/README.rst @@ -1,14 +1,14 @@ Pandas Data Types for SQL systems (BigQuery, Spanner) ===================================================== -|beta| |pypi| |versions| +|ga| |pypi| |versions| `Pandas extension data types`_ for data from SQL systems such as `BigQuery`_. - `Library Documentation`_ -.. |beta| image:: https://img.shields.io/badge/support-beta-orange.svg - :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#beta-support +.. |ga| image:: https://img.shields.io/badge/support-GA-gold.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#general-availability .. 
|pypi| image:: https://img.shields.io/pypi/v/db-dtypes.svg :target: https://pypi.org/project/db-dtypes/ .. |versions| image:: https://img.shields.io/pypi/pyversions/db-dtypes.svg diff --git a/packages/db-dtypes/setup.py b/packages/db-dtypes/setup.py index 7ad511900869..4a9073a69ab6 100644 --- a/packages/db-dtypes/setup.py +++ b/packages/db-dtypes/setup.py @@ -26,7 +26,7 @@ # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' # 'Development Status :: 5 - Production/Stable' -release_status = "Development Status :: 4 - Beta" +release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "packaging >= 17.0", From 27a4e3c4da84b78a97c71478fbf99c6b1022dc9d Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Fri, 25 Mar 2022 17:36:45 -0500 Subject: [PATCH 059/210] test: disambiguate log files for flakybot (#95) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * test: disambiguate log files for flakybot * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * try different owlbot config * try again without backslash * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- packages/db-dtypes/noxfile.py | 4 ++-- packages/db-dtypes/owlbot.py | 7 ++++++- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/packages/db-dtypes/noxfile.py b/packages/db-dtypes/noxfile.py index e3f4d5c814bb..0ab62909f490 100644 --- a/packages/db-dtypes/noxfile.py +++ b/packages/db-dtypes/noxfile.py @@ -103,7 +103,7 @@ def default(session, tests_path): session.run( "py.test", "--quiet", - f"--junitxml=unit_{session.python}_sponge_log.xml", + f"--junitxml={os.path.split(tests_path)[-1]}_{session.python}_sponge_log.xml", "--cov=db_dtypes", "--cov=tests/unit", "--cov-append", @@ -179,7 +179,7 @@ def prerelease(session, tests_path): session.run( 
"py.test", "--quiet", - f"--junitxml=prerelease_unit_{session.python}_sponge_log.xml", + f"--junitxml={os.path.split(tests_path)[-1]}_prerelease_{session.python}_sponge_log.xml", "--cov=db_dtypes", "--cov=tests/unit", "--cov-append", diff --git a/packages/db-dtypes/owlbot.py b/packages/db-dtypes/owlbot.py index ec5a5bfcadbd..383ae3714d66 100644 --- a/packages/db-dtypes/owlbot.py +++ b/packages/db-dtypes/owlbot.py @@ -81,6 +81,11 @@ ["noxfile.py"], r"def default\(session\):", "def default(session, tests_path):" ) s.replace(["noxfile.py"], r'os.path.join\("tests", "unit"\),', "tests_path,") +s.replace( + ["noxfile.py"], + r'f"--junitxml=unit_{session.python}_sponge_log.xml",', + r'f"--junitxml={os.path.split(tests_path)[-1]}_{session.python}_sponge_log.xml",', +) s.replace( ["noxfile.py"], r''' @@ -154,7 +159,7 @@ def prerelease(session, tests_path): session.run( "py.test", "--quiet", - f"--junitxml=prerelease_unit_{session.python}_sponge_log.xml", + f"--junitxml={os.path.split(tests_path)[-1]}_prerelease_{session.python}_sponge_log.xml", "--cov=db_dtypes", "--cov=tests/unit", "--cov-append", From cc0aeb50002f897b3a254d4664769912d391e4f0 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 28 Mar 2022 09:40:20 -0500 Subject: [PATCH 060/210] chore(main): release 1.0.0 (#94) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Anthonios Partheniou --- packages/db-dtypes/CHANGELOG.md | 7 +++++++ packages/db-dtypes/db_dtypes/version.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/db-dtypes/CHANGELOG.md b/packages/db-dtypes/CHANGELOG.md index b46bc6bbc1af..44596f4bb4a4 100644 --- a/packages/db-dtypes/CHANGELOG.md +++ b/packages/db-dtypes/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.0.0](https://github.com/googleapis/python-db-dtypes-pandas/compare/v0.4.0...v1.0.0) (2022-03-25) + + +### Features + +* label package as 
generally available ([#92](https://github.com/googleapis/python-db-dtypes-pandas/issues/92)) ([0363e87](https://github.com/googleapis/python-db-dtypes-pandas/commit/0363e8725b322881c1fe1e89bdeadd0f67317d22)) + ## [0.4.0](https://github.com/googleapis/python-db-dtypes-pandas/compare/v0.3.1...v0.4.0) (2022-03-24) diff --git a/packages/db-dtypes/db_dtypes/version.py b/packages/db-dtypes/db_dtypes/version.py index c0c2669d20c7..f1637e76c627 100644 --- a/packages/db-dtypes/db_dtypes/version.py +++ b/packages/db-dtypes/db_dtypes/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "0.4.0" +__version__ = "1.0.0" From 716d1037bcb34df1a71e9b538d63b15b1159a756 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 29 Mar 2022 00:04:19 +0000 Subject: [PATCH 061/210] chore(python): use black==22.3.0 (#96) Source-Link: https://github.com/googleapis/synthtool/commit/6fab84af09f2cf89a031fd8671d1def6b2931b11 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:7cffbc10910c3ab1b852c05114a08d374c195a81cdec1d4a67a1d129331d0bfe --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 2 +- packages/db-dtypes/db_dtypes/__init__.py | 6 ++-- packages/db-dtypes/db_dtypes/core.py | 8 ++++-- packages/db-dtypes/docs/conf.py | 23 +++++++++++++-- packages/db-dtypes/noxfile.py | 9 ++++-- .../db-dtypes/samples/snippets/noxfile.py | 4 +-- .../snippets/pandas_date_and_time_test.py | 3 +- packages/db-dtypes/tests/unit/test_arrow.py | 28 +++++++++++++------ packages/db-dtypes/tests/unit/test_dtypes.py | 6 ++-- 9 files changed, 65 insertions(+), 24 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 44c78f7cc12d..87dd00611576 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # 
limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:4e1991042fe54b991db9ca17c8fb386e61b22fe4d1472a568bf0fcac85dcf5d3 + digest: sha256:7cffbc10910c3ab1b852c05114a08d374c195a81cdec1d4a67a1d129331d0bfe diff --git a/packages/db-dtypes/db_dtypes/__init__.py b/packages/db-dtypes/db_dtypes/__init__.py index 7889dac672bf..ca0b4680d2fa 100644 --- a/packages/db-dtypes/db_dtypes/__init__.py +++ b/packages/db-dtypes/db_dtypes/__init__.py @@ -197,7 +197,8 @@ def __arrow_array__(self, type=None): # since the datetime epoch (midnight 1970-01-01). array = array.view(pyarrow.time64("ns")) return pyarrow.compute.cast( - array, type if type is not None else pyarrow.time64("ns"), + array, + type if type is not None else pyarrow.time64("ns"), ) @@ -297,7 +298,8 @@ def __arrow_array__(self, type=None): """ array = pyarrow.array(self._ndarray, type=pyarrow.timestamp("ns")) return pyarrow.compute.cast( - array, type if type is not None else pyarrow.date32(), + array, + type if type is not None else pyarrow.date32(), ) def __add__(self, other): diff --git a/packages/db-dtypes/db_dtypes/core.py b/packages/db-dtypes/db_dtypes/core.py index 68123e188b7e..e9ab4add7148 100644 --- a/packages/db-dtypes/db_dtypes/core.py +++ b/packages/db-dtypes/db_dtypes/core.py @@ -64,7 +64,10 @@ def __init__(self, values, dtype=None, copy: bool = False): @classmethod def __ndarray(cls, scalars): - return numpy.array([cls._datetime(scalar) for scalar in scalars], "M8[ns]",) + return numpy.array( + [cls._datetime(scalar) for scalar in scalars], + "M8[ns]", + ) @classmethod def _from_sequence(cls, scalars, *, dtype=None, copy=False): @@ -186,7 +189,8 @@ def median( raise NotImplementedError("Need pandas 1.3 or later to calculate median.") pandas_backports.numpy_validate_median( - (), {"out": out, "overwrite_input": overwrite_input, "keepdims": keepdims}, + (), + {"out": out, "overwrite_input": overwrite_input, "keepdims": keepdims}, ) result = 
pandas_backports.nanmedian(self._ndarray, axis=axis, skipna=skipna) if axis is None or self.ndim == 1: diff --git a/packages/db-dtypes/docs/conf.py b/packages/db-dtypes/docs/conf.py index 5cf73ba1c1c3..0565c618821c 100644 --- a/packages/db-dtypes/docs/conf.py +++ b/packages/db-dtypes/docs/conf.py @@ -279,7 +279,13 @@ # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ - (root_doc, "db-dtypes.tex", "db-dtypes Documentation", author, "manual",) + ( + root_doc, + "db-dtypes.tex", + "db-dtypes Documentation", + author, + "manual", + ) ] # The name of an image file (relative to this directory) to place at the top of @@ -307,7 +313,15 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). -man_pages = [(root_doc, "db-dtypes", "db-dtypes Documentation", [author], 1,)] +man_pages = [ + ( + root_doc, + "db-dtypes", + "db-dtypes Documentation", + [author], + 1, + ) +] # If true, show URL addresses after external links. 
# man_show_urls = False @@ -347,7 +361,10 @@ intersphinx_mapping = { "python": ("https://python.readthedocs.org/en/latest/", None), "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), "grpc": ("https://grpc.github.io/grpc/python/", None), "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), diff --git a/packages/db-dtypes/noxfile.py b/packages/db-dtypes/noxfile.py index 0ab62909f490..d529185e0297 100644 --- a/packages/db-dtypes/noxfile.py +++ b/packages/db-dtypes/noxfile.py @@ -25,7 +25,7 @@ import nox -BLACK_VERSION = "black==19.10b0" +BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "db_dtypes", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" @@ -60,7 +60,9 @@ def lint(session): """ session.install("flake8", BLACK_VERSION) session.run( - "black", "--check", *BLACK_PATHS, + "black", + "--check", + *BLACK_PATHS, ) session.run("flake8", "db_dtypes", "tests") @@ -70,7 +72,8 @@ def blacken(session): """Run black. Format code to uniform standard.""" session.install(BLACK_VERSION) session.run( - "black", *BLACK_PATHS, + "black", + *BLACK_PATHS, ) diff --git a/packages/db-dtypes/samples/snippets/noxfile.py b/packages/db-dtypes/samples/snippets/noxfile.py index 85f5836dba3a..25f87a215d4c 100644 --- a/packages/db-dtypes/samples/snippets/noxfile.py +++ b/packages/db-dtypes/samples/snippets/noxfile.py @@ -29,7 +29,7 @@ # WARNING - WARNING - WARNING - WARNING - WARNING # WARNING - WARNING - WARNING - WARNING - WARNING -BLACK_VERSION = "black==19.10b0" +BLACK_VERSION = "black==22.3.0" # Copy `noxfile_config.py` to your directory and modify it instead. 
@@ -253,7 +253,7 @@ def py(session: nox.sessions.Session) -> None: def _get_repo_root() -> Optional[str]: - """ Returns the root folder of the project. """ + """Returns the root folder of the project.""" # Get root of this repository. Assume we don't have directories nested deeper than 10 items. p = Path(os.getcwd()) for i in range(10): diff --git a/packages/db-dtypes/samples/snippets/pandas_date_and_time_test.py b/packages/db-dtypes/samples/snippets/pandas_date_and_time_test.py index 6f78240e41c8..50c56d5745ee 100644 --- a/packages/db-dtypes/samples/snippets/pandas_date_and_time_test.py +++ b/packages/db-dtypes/samples/snippets/pandas_date_and_time_test.py @@ -39,7 +39,8 @@ def test_pandas_date_and_time(): assert list(dates) == [datetime.date(2021, 9, 17), datetime.date(2021, 9, 18)] assert np.array_equal( - diffs, dates.astype("datetime64") - dates2.astype("datetime64"), + diffs, + dates.astype("datetime64") - dates2.astype("datetime64"), ) assert np.array_equal(after, dates.astype("object") + do) diff --git a/packages/db-dtypes/tests/unit/test_arrow.py b/packages/db-dtypes/tests/unit/test_arrow.py index 4d4fc50baecf..56bbd01b7d25 100644 --- a/packages/db-dtypes/tests/unit/test_arrow.py +++ b/packages/db-dtypes/tests/unit/test_arrow.py @@ -54,7 +54,8 @@ def types_mapper( [dt.date(2021, 9, 27), None, dt.date(2011, 9, 27)], dtype="dbdate" ), pyarrow.array( - [dt.date(2021, 9, 27), None, dt.date(2011, 9, 27)], type=pyarrow.date32(), + [dt.date(2021, 9, 27), None, dt.date(2011, 9, 27)], + type=pyarrow.date32(), ), ), ( @@ -67,14 +68,18 @@ def types_mapper( type=pyarrow.date32(), ), ), - (pandas.Series([], dtype="dbtime"), pyarrow.array([], type=pyarrow.time64("ns")),), + ( + pandas.Series([], dtype="dbtime"), + pyarrow.array([], type=pyarrow.time64("ns")), + ), ( pandas.Series([None, None, None], dtype="dbtime"), pyarrow.array([None, None, None], type=pyarrow.time64("ns")), ), ( pandas.Series( - [dt.time(0, 0, 0, 0), None, dt.time(23, 59, 59, 999_999)], 
dtype="dbtime", + [dt.time(0, 0, 0, 0), None, dt.time(23, 59, 59, 999_999)], + dtype="dbtime", ), pyarrow.array( [dt.time(0, 0, 0, 0), None, dt.time(23, 59, 59, 999_999)], @@ -111,7 +116,8 @@ def types_mapper( [dt.date(2021, 9, 27), None, dt.date(2011, 9, 27)], dtype="dbdate" ), pyarrow.array( - [dt.date(2021, 9, 27), None, dt.date(2011, 9, 27)], type=pyarrow.date64(), + [dt.date(2021, 9, 27), None, dt.date(2011, 9, 27)], + type=pyarrow.date64(), ), ), ( @@ -124,14 +130,18 @@ def types_mapper( type=pyarrow.date64(), ), ), - (pandas.Series([], dtype="dbtime"), pyarrow.array([], type=pyarrow.time32("ms")),), + ( + pandas.Series([], dtype="dbtime"), + pyarrow.array([], type=pyarrow.time32("ms")), + ), ( pandas.Series([None, None, None], dtype="dbtime"), pyarrow.array([None, None, None], type=pyarrow.time32("ms")), ), ( pandas.Series( - [dt.time(0, 0, 0, 0), None, dt.time(23, 59, 59, 999_000)], dtype="dbtime", + [dt.time(0, 0, 0, 0), None, dt.time(23, 59, 59, 999_000)], + dtype="dbtime", ), pyarrow.array( [dt.time(0, 0, 0, 0), None, dt.time(23, 59, 59, 999_000)], @@ -158,7 +168,8 @@ def types_mapper( ), ( pandas.Series( - [dt.time(0, 0, 0, 0), None, dt.time(23, 59, 59, 999_999)], dtype="dbtime", + [dt.time(0, 0, 0, 0), None, dt.time(23, 59, 59, 999_999)], + dtype="dbtime", ), pyarrow.array( [dt.time(0, 0, 0, 0), None, dt.time(23, 59, 59, 999_999)], @@ -220,7 +231,8 @@ def types_mapper( ), pytest.param( pandas.Series( - ["0:0:0", "12:30:15.123456789", "23:59:59.999999999"], dtype="dbtime", + ["0:0:0", "12:30:15.123456789", "23:59:59.999999999"], + dtype="dbtime", ), pyarrow.array( [ diff --git a/packages/db-dtypes/tests/unit/test_dtypes.py b/packages/db-dtypes/tests/unit/test_dtypes.py index dc1613be74ff..6584cee7ee6a 100644 --- a/packages/db-dtypes/tests/unit/test_dtypes.py +++ b/packages/db-dtypes/tests/unit/test_dtypes.py @@ -201,7 +201,8 @@ def test___getitem___arrayindex(dtype): cls = _cls(dtype) sample_values = SAMPLE_VALUES[dtype] np.testing.assert_array_equal( - 
cls(sample_values)[[1, 3]], cls([sample_values[1], sample_values[3]]), + cls(sample_values)[[1, 3]], + cls([sample_values[1], sample_values[3]]), ) @@ -410,7 +411,8 @@ def test_unique(dtype): cls = _cls(dtype) sample_values = SAMPLE_VALUES[dtype] np.testing.assert_array_equal( - cls(sample_values * 3).unique(), cls(sample_values), + cls(sample_values * 3).unique(), + cls(sample_values), ) From 114978cb9190346450bcd9d7144d00fc554328da Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 30 Mar 2022 16:56:25 +0000 Subject: [PATCH 062/210] chore(python): add E231 to .flake8 ignore list (#97) Source-Link: https://github.com/googleapis/synthtool/commit/7ff4aad2ec5af0380e8bd6da1fa06eaadf24ec81 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:462782b0b492346b2d9099aaff52206dd30bc8e031ea97082e6facecc2373244 --- packages/db-dtypes/.flake8 | 2 +- packages/db-dtypes/.github/.OwlBot.lock.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/db-dtypes/.flake8 b/packages/db-dtypes/.flake8 index 29227d4cf419..2e438749863d 100644 --- a/packages/db-dtypes/.flake8 +++ b/packages/db-dtypes/.flake8 @@ -16,7 +16,7 @@ # Generated by synthtool. DO NOT EDIT! [flake8] -ignore = E203, E266, E501, W503 +ignore = E203, E231, E266, E501, W503 exclude = # Exclude generated code. **/proto/** diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 87dd00611576..9e0a9356b6eb 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:7cffbc10910c3ab1b852c05114a08d374c195a81cdec1d4a67a1d129331d0bfe + digest: sha256:462782b0b492346b2d9099aaff52206dd30bc8e031ea97082e6facecc2373244 From b9300a2d5ba38ad4cfb130bf0bc7ec5facacb869 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 30 Mar 2022 20:14:51 -0700 Subject: [PATCH 063/210] chore(python): update .pre-commit-config.yaml to use black==22.3.0 (#98) Source-Link: https://github.com/googleapis/synthtool/commit/7804ade3daae0d66649bee8df6c55484c6580b8d Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:eede5672562a32821444a8e803fb984a6f61f2237ea3de229d2de24453f4ae7d Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 3 ++- packages/db-dtypes/.pre-commit-config.yaml | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 9e0a9356b6eb..22cc254afa2c 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,4 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:462782b0b492346b2d9099aaff52206dd30bc8e031ea97082e6facecc2373244 + digest: sha256:eede5672562a32821444a8e803fb984a6f61f2237ea3de229d2de24453f4ae7d +# created: 2022-03-30T23:44:26.560599165Z diff --git a/packages/db-dtypes/.pre-commit-config.yaml b/packages/db-dtypes/.pre-commit-config.yaml index 62eb5a77d9a3..46d237160f6d 100644 --- a/packages/db-dtypes/.pre-commit-config.yaml +++ b/packages/db-dtypes/.pre-commit-config.yaml @@ -22,7 +22,7 @@ repos: - id: end-of-file-fixer - id: check-yaml - repo: https://github.com/psf/black - rev: 19.10b0 + rev: 22.3.0 hooks: - id: black - repo: https://gitlab.com/pycqa/flake8 From 68dbbc0eeff68df022245d0c9db89ed14d948b32 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 31 Mar 2022 22:29:26 -0400 Subject: [PATCH 064/210] chore(python): Enable size-label bot (#99) Source-Link: https://github.com/googleapis/synthtool/commit/06e82790dd719a165ad32b8a06f8f6ec3e3cae0f Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:b3500c053313dc34e07b1632ba9e4e589f4f77036a7cf39e1fe8906811ae0fce Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 4 ++-- packages/db-dtypes/.github/auto-label.yaml | 2 ++ 2 files changed, 4 insertions(+), 2 deletions(-) create mode 100644 packages/db-dtypes/.github/auto-label.yaml diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 22cc254afa2c..58a0b153bf0e 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:eede5672562a32821444a8e803fb984a6f61f2237ea3de229d2de24453f4ae7d -# created: 2022-03-30T23:44:26.560599165Z + digest: sha256:b3500c053313dc34e07b1632ba9e4e589f4f77036a7cf39e1fe8906811ae0fce +# created: 2022-04-01T01:42:03.609279246Z diff --git a/packages/db-dtypes/.github/auto-label.yaml b/packages/db-dtypes/.github/auto-label.yaml new file mode 100644 index 000000000000..09c8d735b456 --- /dev/null +++ b/packages/db-dtypes/.github/auto-label.yaml @@ -0,0 +1,2 @@ +requestsize: + enabled: true From 8ed3ed14602465853f9e6a03409d7cd11381c01a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 1 Apr 2022 19:42:18 +0000 Subject: [PATCH 065/210] chore(python): refactor unit / system test dependency install (#100) Source-Link: https://github.com/googleapis/synthtool/commit/993985f0fc4b37152e588f0549bcbdaf34666023 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:1894490910e891a385484514b22eb5133578897eb5b3c380e6d8ad475c6647cd --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 4 +- packages/db-dtypes/noxfile.py | 105 +++++++++++++++---- 2 files changed, 87 insertions(+), 22 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 58a0b153bf0e..fa5762290c5b 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:b3500c053313dc34e07b1632ba9e4e589f4f77036a7cf39e1fe8906811ae0fce -# created: 2022-04-01T01:42:03.609279246Z + digest: sha256:1894490910e891a385484514b22eb5133578897eb5b3c380e6d8ad475c6647cd +# created: 2022-04-01T15:48:07.524222836Z diff --git a/packages/db-dtypes/noxfile.py b/packages/db-dtypes/noxfile.py index d529185e0297..4c96c396c50c 100644 --- a/packages/db-dtypes/noxfile.py +++ b/packages/db-dtypes/noxfile.py @@ -21,16 +21,40 @@ import pathlib import re import shutil +import warnings import nox - BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "db_dtypes", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" -SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] + UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES = [] +UNIT_TEST_LOCAL_DEPENDENCIES = [] +UNIT_TEST_DEPENDENCIES = [] +UNIT_TEST_EXTRAS = [] +UNIT_TEST_EXTRAS_BY_PYTHON = {} + +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES = [] +SYSTEM_TEST_DEPENDENCIES = [] +SYSTEM_TEST_EXTRAS = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON = {} CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -84,23 +108,41 @@ def lint_setup_py(session): session.run("python", "setup.py", "check", "--restructuredtext", "--strict") +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. 
Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + def default(session, tests_path): # Install all test dependencies, then install this package in-place. constraints_path = str( CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) - session.install( - "mock", - "asyncmock", - "pytest", - "pytest-cov", - "pytest-asyncio", - "-c", - constraints_path, - ) - - session.install("-e", ".", "-c", constraints_path) + install_unittest_dependencies(session, "-c", constraints_path) # Run py.test against the unit tests. session.run( @@ -218,6 +260,35 @@ def unit_prerelease(session): prerelease(session, os.path.join("tests", "unit")) +def install_systemtest_dependencies(session, *constraints): + + # Use pre-release gRPC for system tests. 
+ session.install("--pre", "grpcio") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + @nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) def system(session): """Run the system test suite.""" @@ -240,13 +311,7 @@ def system(session): if not system_test_exists and not system_test_folder_exists: session.skip("System tests were not found") - # Use pre-release gRPC for system tests. - session.install("--pre", "grpcio") - - # Install all test dependencies, then install this package into the - # virtualenv's dist-packages. - session.install("mock", "pytest", "google-cloud-testutils", "-c", constraints_path) - session.install("-e", ".", "-c", constraints_path) + install_systemtest_dependencies(session, "-c", constraints_path) # Run py.test against the system tests. 
if system_test_exists: From a603398ea5c4e64f3aa1e43116b08538f6bbf273 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 6 Apr 2022 10:46:31 +0000 Subject: [PATCH 066/210] chore(python): add license header to auto-label.yaml (#101) Source-Link: https://github.com/googleapis/synthtool/commit/eb78c980b52c7c6746d2edb77d9cf7aaa99a2aab Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:8a5d3f6a2e43ed8293f34e06a2f56931d1e88a2694c3bb11b15df4eb256ad163 --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 4 ++-- packages/db-dtypes/.github/auto-label.yaml | 13 +++++++++++++ 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index fa5762290c5b..bc893c979e20 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:1894490910e891a385484514b22eb5133578897eb5b3c380e6d8ad475c6647cd -# created: 2022-04-01T15:48:07.524222836Z + digest: sha256:8a5d3f6a2e43ed8293f34e06a2f56931d1e88a2694c3bb11b15df4eb256ad163 +# created: 2022-04-06T10:30:21.687684602Z diff --git a/packages/db-dtypes/.github/auto-label.yaml b/packages/db-dtypes/.github/auto-label.yaml index 09c8d735b456..41bff0b5375a 100644 --- a/packages/db-dtypes/.github/auto-label.yaml +++ b/packages/db-dtypes/.github/auto-label.yaml @@ -1,2 +1,15 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. requestsize: enabled: true From c144bb2de712774a588709908d3aa37fea4a8df0 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 20 Apr 2022 20:13:57 -0400 Subject: [PATCH 067/210] chore(python): add nox session to sort python imports (#102) Source-Link: https://github.com/googleapis/synthtool/commit/1b71c10e20de7ed3f97f692f99a0e3399b67049f Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:00c9d764fd1cd56265f12a5ef4b99a0c9e87cf261018099141e2ca5158890416 Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 4 +-- packages/db-dtypes/noxfile.py | 27 ++++++++++++++++--- .../db-dtypes/samples/snippets/noxfile.py | 22 +++++++++++++++ 3 files changed, 48 insertions(+), 5 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index bc893c979e20..7c454abf76f3 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:8a5d3f6a2e43ed8293f34e06a2f56931d1e88a2694c3bb11b15df4eb256ad163 -# created: 2022-04-06T10:30:21.687684602Z + digest: sha256:00c9d764fd1cd56265f12a5ef4b99a0c9e87cf261018099141e2ca5158890416 +# created: 2022-04-20T23:42:53.970438194Z diff --git a/packages/db-dtypes/noxfile.py b/packages/db-dtypes/noxfile.py index 4c96c396c50c..d4172902b2e4 100644 --- a/packages/db-dtypes/noxfile.py +++ b/packages/db-dtypes/noxfile.py @@ -26,7 +26,8 @@ import nox BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "db_dtypes", "tests", "noxfile.py", "setup.py"] +ISORT_VERSION = "isort==5.10.1" +LINT_PATHS = ["docs", "db_dtypes", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" @@ -86,7 +87,7 @@ def lint(session): session.run( "black", "--check", - *BLACK_PATHS, + *LINT_PATHS, ) session.run("flake8", "db_dtypes", "tests") @@ -97,7 +98,27 @@ def blacken(session): session.install(BLACK_VERSION) session.run( "black", - *BLACK_PATHS, + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, ) diff --git a/packages/db-dtypes/samples/snippets/noxfile.py b/packages/db-dtypes/samples/snippets/noxfile.py index 25f87a215d4c..a40410b56369 100644 --- a/packages/db-dtypes/samples/snippets/noxfile.py +++ b/packages/db-dtypes/samples/snippets/noxfile.py @@ -30,6 +30,7 @@ # WARNING - WARNING - WARNING - WARNING - WARNING BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" # Copy `noxfile_config.py` to your directory and modify it instead. 
@@ -168,12 +169,33 @@ def lint(session: nox.sessions.Session) -> None: @nox.session def blacken(session: nox.sessions.Session) -> None: + """Run black. Format code to uniform standard.""" session.install(BLACK_VERSION) python_files = [path for path in os.listdir(".") if path.endswith(".py")] session.run("black", *python_files) +# +# format = isort + black +# + + +@nox.session +def format(session: nox.sessions.Session) -> None: + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run("isort", "--fss", *python_files) + session.run("black", *python_files) + + # # Sample Tests # From 5f8f23cb7cff7bf8a35f3eadc7c5292814f55815 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 21 Apr 2022 13:20:35 -0400 Subject: [PATCH 068/210] chore(python): use ubuntu 22.04 in docs image (#104) Source-Link: https://github.com/googleapis/synthtool/commit/f15cc72fb401b4861cedebb10af74afe428fb1f8 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:bc5eed3804aec2f05fad42aacf973821d9500c174015341f721a984a0825b6fd Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 4 ++-- .../db-dtypes/.kokoro/docker/docs/Dockerfile | 20 +++++++++++++++++-- 2 files changed, 20 insertions(+), 4 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 7c454abf76f3..64f82d6bf4bc 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:00c9d764fd1cd56265f12a5ef4b99a0c9e87cf261018099141e2ca5158890416 -# created: 2022-04-20T23:42:53.970438194Z + digest: sha256:bc5eed3804aec2f05fad42aacf973821d9500c174015341f721a984a0825b6fd +# created: 2022-04-21T15:43:16.246106921Z diff --git a/packages/db-dtypes/.kokoro/docker/docs/Dockerfile b/packages/db-dtypes/.kokoro/docker/docs/Dockerfile index 4e1b1fb8b5a5..238b87b9d1c9 100644 --- a/packages/db-dtypes/.kokoro/docker/docs/Dockerfile +++ b/packages/db-dtypes/.kokoro/docker/docs/Dockerfile @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from ubuntu:20.04 +from ubuntu:22.04 ENV DEBIAN_FRONTEND noninteractive @@ -60,8 +60,24 @@ RUN apt-get update \ && rm -rf /var/lib/apt/lists/* \ && rm -f /var/cache/apt/archives/*.deb +###################### Install python 3.8.11 + +# Download python 3.8.11 +RUN wget https://www.python.org/ftp/python/3.8.11/Python-3.8.11.tgz + +# Extract files +RUN tar -xvf Python-3.8.11.tgz + +# Install python 3.8.11 +RUN ./Python-3.8.11/configure --enable-optimizations +RUN make altinstall + +###################### Install pip RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ - && python3.8 /tmp/get-pip.py \ + && python3 /tmp/get-pip.py \ && rm /tmp/get-pip.py +# Test pip +RUN python3 -m pip + CMD ["python3.8"] From 9b164cee76b54a37ab2077cd7e96da08e7ff0c90 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 25 Apr 2022 17:00:31 +0200 Subject: [PATCH 069/210] chore(deps): update dependency pytest to v7.1.2 (#105) --- packages/db-dtypes/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/db-dtypes/samples/snippets/requirements-test.txt b/packages/db-dtypes/samples/snippets/requirements-test.txt index 4f6bf643fc5e..d00689e0623a 100644 --- 
a/packages/db-dtypes/samples/snippets/requirements-test.txt +++ b/packages/db-dtypes/samples/snippets/requirements-test.txt @@ -1 +1 @@ -pytest==7.1.1 +pytest==7.1.2 From bcaa685c1799505d2ead0c822e8654368f2a52a6 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 5 May 2022 12:11:52 -0400 Subject: [PATCH 070/210] chore: [autoapprove] update readme_gen.py to include autoescape True (#106) Source-Link: https://github.com/googleapis/synthtool/commit/6b4d5a6407d740beb4158b302194a62a4108a8a6 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:f792ee1320e03eda2d13a5281a2989f7ed8a9e50b73ef6da97fac7e1e850b149 Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 4 ++-- packages/db-dtypes/scripts/readme-gen/readme_gen.py | 5 ++++- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 64f82d6bf4bc..b631901e99f4 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:bc5eed3804aec2f05fad42aacf973821d9500c174015341f721a984a0825b6fd -# created: 2022-04-21T15:43:16.246106921Z + digest: sha256:f792ee1320e03eda2d13a5281a2989f7ed8a9e50b73ef6da97fac7e1e850b149 +# created: 2022-05-05T15:17:27.599381182Z diff --git a/packages/db-dtypes/scripts/readme-gen/readme_gen.py b/packages/db-dtypes/scripts/readme-gen/readme_gen.py index d309d6e97518..91b59676bfc7 100644 --- a/packages/db-dtypes/scripts/readme-gen/readme_gen.py +++ b/packages/db-dtypes/scripts/readme-gen/readme_gen.py @@ -28,7 +28,10 @@ jinja_env = jinja2.Environment( trim_blocks=True, loader=jinja2.FileSystemLoader( - os.path.abspath(os.path.join(os.path.dirname(__file__), 'templates')))) + os.path.abspath(os.path.join(os.path.dirname(__file__), "templates")) + ), + autoescape=True, +) README_TMPL = jinja_env.get_template('README.tmpl.rst') From fb53f12741864c10b68c91895cbcc21b02f9cc44 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 5 May 2022 23:56:17 +0000 Subject: [PATCH 071/210] chore(python): auto approve template changes (#108) Source-Link: https://github.com/googleapis/synthtool/commit/453a5d9c9a55d1969240a37d36cec626d20a9024 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:81ed5ecdfc7cac5b699ba4537376f3563f6f04122c4ec9e735d3b3dc1d43dd32 --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 4 ++-- packages/db-dtypes/.github/auto-approve.yml | 3 +++ 2 files changed, 5 insertions(+), 2 deletions(-) create mode 100644 packages/db-dtypes/.github/auto-approve.yml diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index b631901e99f4..757c9dca75ad 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:f792ee1320e03eda2d13a5281a2989f7ed8a9e50b73ef6da97fac7e1e850b149 -# created: 2022-05-05T15:17:27.599381182Z + digest: sha256:81ed5ecdfc7cac5b699ba4537376f3563f6f04122c4ec9e735d3b3dc1d43dd32 +# created: 2022-05-05T22:08:23.383410683Z diff --git a/packages/db-dtypes/.github/auto-approve.yml b/packages/db-dtypes/.github/auto-approve.yml new file mode 100644 index 000000000000..311ebbb853a9 --- /dev/null +++ b/packages/db-dtypes/.github/auto-approve.yml @@ -0,0 +1,3 @@ +# https://github.com/googleapis/repo-automation-bots/tree/main/packages/auto-approve +processes: + - "OwlBotTemplateChanges" From 13b2b29a6ca52d072588a0212b743ff45ba40442 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sat, 7 May 2022 15:26:56 +0200 Subject: [PATCH 072/210] fix(deps): allow pyarrow v8 (#109) * chore(deps): update dependency pyarrow to v8 * fix(deps): allow pyarrow v8 Co-authored-by: Anthonios Partheniou --- packages/db-dtypes/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/db-dtypes/setup.py b/packages/db-dtypes/setup.py index 4a9073a69ab6..ece3ee5ccc27 100644 --- a/packages/db-dtypes/setup.py +++ b/packages/db-dtypes/setup.py @@ -31,7 +31,7 @@ dependencies = [ "packaging >= 17.0", "pandas >= 0.24.2, < 2.0dev", - "pyarrow>=3.0.0, <8.0dev", + "pyarrow>=3.0.0, <9.0dev", "numpy >= 1.16.6, < 2.0dev", ] From f865c3c8de82396c21e25cf25a89712aa8cad0d5 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 9 May 2022 10:21:29 -0400 Subject: [PATCH 073/210] chore(main): release 1.0.1 (#110) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/db-dtypes/CHANGELOG.md | 7 +++++++ packages/db-dtypes/db_dtypes/version.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/db-dtypes/CHANGELOG.md b/packages/db-dtypes/CHANGELOG.md 
index 44596f4bb4a4..25fb809851c7 100644 --- a/packages/db-dtypes/CHANGELOG.md +++ b/packages/db-dtypes/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [1.0.1](https://github.com/googleapis/python-db-dtypes-pandas/compare/v1.0.0...v1.0.1) (2022-05-07) + + +### Bug Fixes + +* **deps:** allow pyarrow v8 ([#109](https://github.com/googleapis/python-db-dtypes-pandas/issues/109)) ([fb30adf](https://github.com/googleapis/python-db-dtypes-pandas/commit/fb30adfd427d3df9919df00b096210ba1eb1b91d)) + ## [1.0.0](https://github.com/googleapis/python-db-dtypes-pandas/compare/v0.4.0...v1.0.0) (2022-03-25) diff --git a/packages/db-dtypes/db_dtypes/version.py b/packages/db-dtypes/db_dtypes/version.py index f1637e76c627..8b94495c6309 100644 --- a/packages/db-dtypes/db_dtypes/version.py +++ b/packages/db-dtypes/db_dtypes/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "1.0.0" +__version__ = "1.0.1" From 9f3c90f89ec28d66db20f557fc029f747701337b Mon Sep 17 00:00:00 2001 From: Dan Lee <71398022+dandhlee@users.noreply.github.com> Date: Wed, 1 Jun 2022 13:07:32 -0400 Subject: [PATCH 074/210] docs: fix changelog header to consistent size (#111) --- packages/db-dtypes/CHANGELOG.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/db-dtypes/CHANGELOG.md b/packages/db-dtypes/CHANGELOG.md index 25fb809851c7..a71a9543f506 100644 --- a/packages/db-dtypes/CHANGELOG.md +++ b/packages/db-dtypes/CHANGELOG.md @@ -1,6 +1,6 @@ # Changelog -### [1.0.1](https://github.com/googleapis/python-db-dtypes-pandas/compare/v1.0.0...v1.0.1) (2022-05-07) +## [1.0.1](https://github.com/googleapis/python-db-dtypes-pandas/compare/v1.0.0...v1.0.1) (2022-05-07) ### Bug Fixes @@ -43,7 +43,7 @@ * add dbtime compliance tests ([#90](https://github.com/googleapis/python-db-dtypes-pandas/issues/90)) 
([f14fb2b](https://github.com/googleapis/python-db-dtypes-pandas/commit/f14fb2bf78d8427b9546db4cdad1d893c1b1e5e1)) * add final dbdate compliance tests and sort ([#89](https://github.com/googleapis/python-db-dtypes-pandas/issues/89)) ([efe7e6d](https://github.com/googleapis/python-db-dtypes-pandas/commit/efe7e6d8953ebf8d2b4d9468c7c92638ea2ec9f9)) -### [0.3.1](https://www.github.com/googleapis/python-db-dtypes-pandas/compare/v0.3.0...v0.3.1) (2021-12-04) +## [0.3.1](https://www.github.com/googleapis/python-db-dtypes-pandas/compare/v0.3.0...v0.3.1) (2021-12-04) ### Bug Fixes @@ -70,7 +70,7 @@ * add how-to guide and include API reference ([#33](https://www.github.com/googleapis/python-db-dtypes-pandas/issues/33)) ([878dce4](https://www.github.com/googleapis/python-db-dtypes-pandas/commit/878dce48bd6714706a2a829775ce00e61724fc7a)) -### [0.1.1](https://www.github.com/googleapis/python-db-dtypes-pandas/compare/v0.1.0...v0.1.1) (2021-10-04) +## [0.1.1](https://www.github.com/googleapis/python-db-dtypes-pandas/compare/v0.1.0...v0.1.1) (2021-10-04) ### Bug Fixes From 0ce2372825169d35a0c489df316acd64ecb7d87c Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 3 Jun 2022 10:10:26 -0400 Subject: [PATCH 075/210] chore: test minimum dependencies in python 3.7 (#113) --- packages/db-dtypes/testing/constraints-3.7.txt | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/packages/db-dtypes/testing/constraints-3.7.txt b/packages/db-dtypes/testing/constraints-3.7.txt index 0b3b3097c5df..a7388cdc2344 100644 --- a/packages/db-dtypes/testing/constraints-3.7.txt +++ b/packages/db-dtypes/testing/constraints-3.7.txt @@ -1,2 +1,11 @@ -# Make sure we test with pandas 1.1.0. The Python version isn't that relevant. -pandas==1.1.0 +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List *all* library dependencies and extras in this file. +# Pin the version to the lower bound. 
+# +# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", +packaging==17.0 +# Make sure we test with pandas 0.24.2. The Python version isn't that relevant. +pandas==0.24.2 +pyarrow==3.0.0 +numpy==1.16.6 From 7a69ef7d3c067769790cfe1a9b4b6dc14b3a9379 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 9 Jun 2022 10:32:45 -0400 Subject: [PATCH 076/210] chore(main): release 1.0.2 (#112) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/db-dtypes/CHANGELOG.md | 7 +++++++ packages/db-dtypes/db_dtypes/version.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/db-dtypes/CHANGELOG.md b/packages/db-dtypes/CHANGELOG.md index a71a9543f506..ec2012cad64a 100644 --- a/packages/db-dtypes/CHANGELOG.md +++ b/packages/db-dtypes/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.0.2](https://github.com/googleapis/python-db-dtypes-pandas/compare/v1.0.1...v1.0.2) (2022-06-03) + + +### Documentation + +* fix changelog header to consistent size ([#111](https://github.com/googleapis/python-db-dtypes-pandas/issues/111)) ([145f875](https://github.com/googleapis/python-db-dtypes-pandas/commit/145f8750682fb007343a57c7c94bc5e7fa5b63ab)) + ## [1.0.1](https://github.com/googleapis/python-db-dtypes-pandas/compare/v1.0.0...v1.0.1) (2022-05-07) diff --git a/packages/db-dtypes/db_dtypes/version.py b/packages/db-dtypes/db_dtypes/version.py index 8b94495c6309..f02885c69ed1 100644 --- a/packages/db-dtypes/db_dtypes/version.py +++ b/packages/db-dtypes/db_dtypes/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "1.0.1" +__version__ = "1.0.2" From 0318459e7935c749b67c9b5fa5a851389ee529cc Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 14 Jul 2022 13:39:16 -0400 Subject: [PATCH 077/210] fix: require python 3.7+ (#125) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(python): drop python 3.6 Source-Link: https://github.com/googleapis/synthtool/commit/4f89b13af10d086458f9b379e56a614f9d6dab7b Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:e7bb19d47c13839fe8c147e50e02e8b6cf5da8edd1af8b82208cd6f66cc2829c * require python 3.7+ in setup.py * remove python 3.6 sample configs * update product documentation in .repo-metadata.json * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * exclude templated readme * require python 3.7+ in setup.py * remove python 3.6 from noxfile * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * update README Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 4 +- .../db-dtypes/.github/workflows/unittest.yml | 2 +- .../.kokoro/continuous/prerelease-deps.cfg | 7 ++ .../.kokoro/presubmit/prerelease-deps.cfg | 7 ++ .../.kokoro/samples/python3.6/common.cfg | 40 --------- .../.kokoro/samples/python3.6/continuous.cfg | 7 -- .../samples/python3.6/periodic-head.cfg | 11 --- .../.kokoro/samples/python3.6/periodic.cfg | 6 -- .../.kokoro/samples/python3.6/presubmit.cfg | 6 -- .../db-dtypes/.kokoro/test-samples-impl.sh | 4 +- packages/db-dtypes/.repo-metadata.json | 4 +- packages/db-dtypes/CONTRIBUTING.rst | 6 +- packages/db-dtypes/README.rst | 4 +- packages/db-dtypes/noxfile.py | 90 ++++++++++++++++++- packages/db-dtypes/owlbot.py | 3 +- .../db-dtypes/samples/snippets/noxfile.py | 2 +- 
.../templates/install_deps.tmpl.rst | 2 +- packages/db-dtypes/setup.py | 3 +- 18 files changed, 119 insertions(+), 89 deletions(-) create mode 100644 packages/db-dtypes/.kokoro/continuous/prerelease-deps.cfg create mode 100644 packages/db-dtypes/.kokoro/presubmit/prerelease-deps.cfg delete mode 100644 packages/db-dtypes/.kokoro/samples/python3.6/common.cfg delete mode 100644 packages/db-dtypes/.kokoro/samples/python3.6/continuous.cfg delete mode 100644 packages/db-dtypes/.kokoro/samples/python3.6/periodic-head.cfg delete mode 100644 packages/db-dtypes/.kokoro/samples/python3.6/periodic.cfg delete mode 100644 packages/db-dtypes/.kokoro/samples/python3.6/presubmit.cfg diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 757c9dca75ad..1ce608523524 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:81ed5ecdfc7cac5b699ba4537376f3563f6f04122c4ec9e735d3b3dc1d43dd32 -# created: 2022-05-05T22:08:23.383410683Z + digest: sha256:e7bb19d47c13839fe8c147e50e02e8b6cf5da8edd1af8b82208cd6f66cc2829c +# created: 2022-07-05T18:31:20.838186805Z diff --git a/packages/db-dtypes/.github/workflows/unittest.yml b/packages/db-dtypes/.github/workflows/unittest.yml index e5be6edbd54d..5531b0141297 100644 --- a/packages/db-dtypes/.github/workflows/unittest.yml +++ b/packages/db-dtypes/.github/workflows/unittest.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ['3.6', '3.7', '3.8', '3.9', '3.10'] + python: ['3.7', '3.8', '3.9', '3.10'] steps: - name: Checkout uses: actions/checkout@v3 diff --git a/packages/db-dtypes/.kokoro/continuous/prerelease-deps.cfg b/packages/db-dtypes/.kokoro/continuous/prerelease-deps.cfg new file mode 100644 index 000000000000..3595fb43f5c0 --- /dev/null +++ 
b/packages/db-dtypes/.kokoro/continuous/prerelease-deps.cfg @@ -0,0 +1,7 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Only run this nox session. +env_vars: { + key: "NOX_SESSION" + value: "prerelease_deps" +} diff --git a/packages/db-dtypes/.kokoro/presubmit/prerelease-deps.cfg b/packages/db-dtypes/.kokoro/presubmit/prerelease-deps.cfg new file mode 100644 index 000000000000..3595fb43f5c0 --- /dev/null +++ b/packages/db-dtypes/.kokoro/presubmit/prerelease-deps.cfg @@ -0,0 +1,7 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Only run this nox session. +env_vars: { + key: "NOX_SESSION" + value: "prerelease_deps" +} diff --git a/packages/db-dtypes/.kokoro/samples/python3.6/common.cfg b/packages/db-dtypes/.kokoro/samples/python3.6/common.cfg deleted file mode 100644 index 84c31ca6e7ef..000000000000 --- a/packages/db-dtypes/.kokoro/samples/python3.6/common.cfg +++ /dev/null @@ -1,40 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "py-3.6" -} - -# Declare build specific Cloud project. -env_vars: { - key: "BUILD_SPECIFIC_GCLOUD_PROJECT" - value: "python-docs-samples-tests-py36" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-db-dtypes-pandas/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. 
-build_file: "python-db-dtypes-pandas/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/python3.6/continuous.cfg b/packages/db-dtypes/.kokoro/samples/python3.6/continuous.cfg deleted file mode 100644 index 7218af1499e5..000000000000 --- a/packages/db-dtypes/.kokoro/samples/python3.6/continuous.cfg +++ /dev/null @@ -1,7 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - diff --git a/packages/db-dtypes/.kokoro/samples/python3.6/periodic-head.cfg b/packages/db-dtypes/.kokoro/samples/python3.6/periodic-head.cfg deleted file mode 100644 index ee3d56408db9..000000000000 --- a/packages/db-dtypes/.kokoro/samples/python3.6/periodic-head.cfg +++ /dev/null @@ -1,11 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-db-dtypes-pandas/.kokoro/test-samples-against-head.sh" -} diff --git a/packages/db-dtypes/.kokoro/samples/python3.6/periodic.cfg b/packages/db-dtypes/.kokoro/samples/python3.6/periodic.cfg deleted file mode 100644 index 71cd1e597e38..000000000000 --- a/packages/db-dtypes/.kokoro/samples/python3.6/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} diff --git a/packages/db-dtypes/.kokoro/samples/python3.6/presubmit.cfg b/packages/db-dtypes/.kokoro/samples/python3.6/presubmit.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/db-dtypes/.kokoro/samples/python3.6/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/test-samples-impl.sh 
b/packages/db-dtypes/.kokoro/test-samples-impl.sh index 8a324c9c7bc6..2c6500cae0b9 100755 --- a/packages/db-dtypes/.kokoro/test-samples-impl.sh +++ b/packages/db-dtypes/.kokoro/test-samples-impl.sh @@ -33,7 +33,7 @@ export PYTHONUNBUFFERED=1 env | grep KOKORO # Install nox -python3.6 -m pip install --upgrade --quiet nox +python3.9 -m pip install --upgrade --quiet nox # Use secrets acessor service account to get secrets if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then @@ -76,7 +76,7 @@ for file in samples/**/requirements.txt; do echo "------------------------------------------------------------" # Use nox to execute the tests for the project. - python3.6 -m nox -s "$RUN_TESTS_SESSION" + python3.9 -m nox -s "$RUN_TESTS_SESSION" EXIT=$? # If this is a periodic build, send the test log to the FlakyBot. diff --git a/packages/db-dtypes/.repo-metadata.json b/packages/db-dtypes/.repo-metadata.json index 027751fe0b26..761c9991facc 100644 --- a/packages/db-dtypes/.repo-metadata.json +++ b/packages/db-dtypes/.repo-metadata.json @@ -1,6 +1,7 @@ { "name": "db-dtypes", "name_pretty": "Pandas Data Types for SQL systems (BigQuery, Spanner)", + "product_documentation": "https://pandas.pydata.org/pandas-docs/stable/ecosystem.html#ecosystem-extensions", "client_documentation": "https://googleapis.dev/python/db-dtypes/latest/index.html", "release_level": "stable", "language": "python", @@ -9,5 +10,6 @@ "distribution_name": "db-dtypes", "api_id": "bigquery.googleapis.com", "default_version": "", - "codeowner_team": "@googleapis/api-bigquery" + "codeowner_team": "@googleapis/api-bigquery", + "api_description": "Pandas extension data types for data from SQL systems such as BigQuery." } diff --git a/packages/db-dtypes/CONTRIBUTING.rst b/packages/db-dtypes/CONTRIBUTING.rst index 22f6382c7032..5f36f12da0ac 100644 --- a/packages/db-dtypes/CONTRIBUTING.rst +++ b/packages/db-dtypes/CONTRIBUTING.rst @@ -22,7 +22,7 @@ In order to add a feature: documentation. 
- The feature must work fully on the following CPython versions: - 3.6, 3.7, 3.8, 3.9 and 3.10 on both UNIX and Windows. + 3.7, 3.8, 3.9 and 3.10 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -221,13 +221,11 @@ Supported Python Versions We support: -- `Python 3.6`_ - `Python 3.7`_ - `Python 3.8`_ - `Python 3.9`_ - `Python 3.10`_ -.. _Python 3.6: https://docs.python.org/3.6/ .. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ .. _Python 3.9: https://docs.python.org/3.9/ @@ -239,7 +237,7 @@ Supported versions can be found in our ``noxfile.py`` `config`_. .. _config: https://github.com/googleapis/python-db-dtypes-pandas/blob/main/noxfile.py -We also explicitly decided to support Python 3 beginning with version 3.6. +We also explicitly decided to support Python 3 beginning with version 3.7. Reasons for this include: - Encouraging use of newest versions of Python 3 diff --git a/packages/db-dtypes/README.rst b/packages/db-dtypes/README.rst index 8ea047c503ea..4288be59ec25 100644 --- a/packages/db-dtypes/README.rst +++ b/packages/db-dtypes/README.rst @@ -34,11 +34,11 @@ dependencies. Supported Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^ -Python >= 3.6 +Python >= 3.7 Unsupported Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Python <= 3.5. +Python <= 3.6. 
Mac/Linux diff --git a/packages/db-dtypes/noxfile.py b/packages/db-dtypes/noxfile.py index d4172902b2e4..a6ef0a7ed27e 100644 --- a/packages/db-dtypes/noxfile.py +++ b/packages/db-dtypes/noxfile.py @@ -20,6 +20,7 @@ import os import pathlib import re +import re import shutil import warnings @@ -31,7 +32,7 @@ DEFAULT_PYTHON_VERSION = "3.8" -UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] +UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] UNIT_TEST_STANDARD_DEPENDENCIES = [ "mock", "asyncmock", @@ -421,3 +422,90 @@ def docfx(session): os.path.join("docs", ""), os.path.join("docs", "_build", "html", ""), ) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + session.install(*UNIT_TEST_STANDARD_DEPENDENCIES) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. 
+ constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + if os.path.exists("samples/snippets/requirements.txt"): + session.install("-r", "samples/snippets/requirements.txt") + + if os.path.exists("samples/snippets/requirements-test.txt"): + session.install("-r", "samples/snippets/requirements-test.txt") + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "googleapis-common-protos", + "grpcio", + "grpcio-status", + "google-api-core", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils" + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + "google-auth", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + + session.run("py.test", "tests/unit") + + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Only run system tests if found. + if os.path.exists(system_test_path) or os.path.exists(system_test_folder_path): + session.run("py.test", "tests/system") + + snippets_test_path = os.path.join("samples", "snippets") + + # Only run samples tests if found. 
+ if os.path.exists(snippets_test_path): + session.run("py.test", "samples/snippets") diff --git a/packages/db-dtypes/owlbot.py b/packages/db-dtypes/owlbot.py index 383ae3714d66..4b89096dfad5 100644 --- a/packages/db-dtypes/owlbot.py +++ b/packages/db-dtypes/owlbot.py @@ -28,14 +28,13 @@ # Add templated files # ---------------------------------------------------------------------------- templated_files = common.py_library( - unit_test_python_versions=["3.6", "3.7", "3.8", "3.9", "3.10"], system_test_python_versions=["3.8"], cov_level=100, intersphinx_dependencies={ "pandas": "https://pandas.pydata.org/pandas-docs/stable/" }, ) -s.move(templated_files, excludes=["docs/multiprocessing.rst"]) +s.move(templated_files, excludes=["docs/multiprocessing.rst", "README.rst"]) # ---------------------------------------------------------------------------- # Fixup files diff --git a/packages/db-dtypes/samples/snippets/noxfile.py b/packages/db-dtypes/samples/snippets/noxfile.py index a40410b56369..29b5bc852183 100644 --- a/packages/db-dtypes/samples/snippets/noxfile.py +++ b/packages/db-dtypes/samples/snippets/noxfile.py @@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] diff --git a/packages/db-dtypes/scripts/readme-gen/templates/install_deps.tmpl.rst b/packages/db-dtypes/scripts/readme-gen/templates/install_deps.tmpl.rst index 275d649890d7..6f069c6c87a5 100644 --- a/packages/db-dtypes/scripts/readme-gen/templates/install_deps.tmpl.rst +++ b/packages/db-dtypes/scripts/readme-gen/templates/install_deps.tmpl.rst @@ -12,7 +12,7 @@ Install Dependencies .. _Python Development Environment Setup Guide: https://cloud.google.com/python/setup -#. Create a virtualenv. 
Samples are compatible with Python 3.6+. +#. Create a virtualenv. Samples are compatible with Python 3.7+. .. code-block:: bash diff --git a/packages/db-dtypes/setup.py b/packages/db-dtypes/setup.py index ece3ee5ccc27..67403c21bee7 100644 --- a/packages/db-dtypes/setup.py +++ b/packages/db-dtypes/setup.py @@ -62,7 +62,6 @@ def readme(): "License :: OSI Approved :: Apache Software License", "Programming Language :: Python", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", @@ -72,6 +71,6 @@ def readme(): ], platforms="Posix; MacOS X; Windows", install_requires=dependencies, - python_requires=">=3.6, <3.11", + python_requires=">=3.7, <3.11", tests_require=["pytest"], ) From 298a8171b3c105e15b87e361823171c3b5278274 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 15 Jul 2022 14:21:07 +0200 Subject: [PATCH 078/210] chore(deps): update actions/setup-python action to v4 (#114) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update actions/setup-python action to v4 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- packages/db-dtypes/.github/workflows/compliance.yml | 4 ++-- packages/db-dtypes/.github/workflows/unittest-prerelease.yml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/db-dtypes/.github/workflows/compliance.yml b/packages/db-dtypes/.github/workflows/compliance.yml index eca8cc202617..aa49744df5b5 100644 --- a/packages/db-dtypes/.github/workflows/compliance.yml +++ b/packages/db-dtypes/.github/workflows/compliance.yml @@ -13,7 +13,7 @@ jobs: - name: Checkout uses: actions/checkout@v3 - name: Setup Python - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 
with: python-version: ${{ matrix.python }} - name: Install nox @@ -34,7 +34,7 @@ jobs: - name: Checkout uses: actions/checkout@v3 - name: Setup Python - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python }} - name: Install nox diff --git a/packages/db-dtypes/.github/workflows/unittest-prerelease.yml b/packages/db-dtypes/.github/workflows/unittest-prerelease.yml index a11568ace889..1cf125c0a223 100644 --- a/packages/db-dtypes/.github/workflows/unittest-prerelease.yml +++ b/packages/db-dtypes/.github/workflows/unittest-prerelease.yml @@ -13,7 +13,7 @@ jobs: - name: Checkout uses: actions/checkout@v3 - name: Setup Python - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python }} - name: Install nox From 7fa2a06206e56e746276a2edfd9a73ea164b43f8 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 5 Aug 2022 22:05:28 +0200 Subject: [PATCH 079/210] fix(deps): allow pyarrow < 10 (#130) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fix(deps): allow pyarrow < 10 Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- packages/db-dtypes/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/db-dtypes/setup.py b/packages/db-dtypes/setup.py index 67403c21bee7..7b4eb413a6a4 100644 --- a/packages/db-dtypes/setup.py +++ b/packages/db-dtypes/setup.py @@ -31,7 +31,7 @@ dependencies = [ "packaging >= 17.0", "pandas >= 0.24.2, < 2.0dev", - "pyarrow>=3.0.0, <9.0dev", + "pyarrow>=3.0.0, <10.0dev", "numpy >= 1.16.6, < 2.0dev", ] From 4c138e7663e8a74476a44912f49ff2dc2b2cb9b3 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 8 Aug 2022 12:28:06 -0400 Subject: 
[PATCH 080/210] chore(main): release 1.0.3 (#128) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/db-dtypes/CHANGELOG.md | 8 ++++++++ packages/db-dtypes/db_dtypes/version.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/packages/db-dtypes/CHANGELOG.md b/packages/db-dtypes/CHANGELOG.md index ec2012cad64a..53db3c1bc4c3 100644 --- a/packages/db-dtypes/CHANGELOG.md +++ b/packages/db-dtypes/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [1.0.3](https://github.com/googleapis/python-db-dtypes-pandas/compare/v1.0.2...v1.0.3) (2022-08-05) + + +### Bug Fixes + +* **deps:** allow pyarrow < 10 ([#130](https://github.com/googleapis/python-db-dtypes-pandas/issues/130)) ([508564f](https://github.com/googleapis/python-db-dtypes-pandas/commit/508564f1b898ec1ad7cae4c826ab3ad4b9a5349e)) +* require python 3.7+ ([#125](https://github.com/googleapis/python-db-dtypes-pandas/issues/125)) ([bce01df](https://github.com/googleapis/python-db-dtypes-pandas/commit/bce01dfe92815ea478e1db4166e629062ec5ff97)) + ## [1.0.2](https://github.com/googleapis/python-db-dtypes-pandas/compare/v1.0.1...v1.0.2) (2022-06-03) diff --git a/packages/db-dtypes/db_dtypes/version.py b/packages/db-dtypes/db_dtypes/version.py index f02885c69ed1..e2a9429ca141 100644 --- a/packages/db-dtypes/db_dtypes/version.py +++ b/packages/db-dtypes/db_dtypes/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "1.0.2" +__version__ = "1.0.3" From 32befb6aa8d5a16f576f2930a0f2afbe23be2d16 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 2 Sep 2022 10:53:58 -0400 Subject: [PATCH 081/210] chore(python): update .kokoro/requirements.txt (#141) Source-Link: https://github.com/googleapis/synthtool/commit/703554a14c7479542335b62fa69279f93a9e38ec Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:94961fdc5c9ca6d13530a6a414a49d2f607203168215d074cdb0a1df9ec31c0b Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 3 +- packages/db-dtypes/.github/workflows/docs.yml | 4 +- packages/db-dtypes/.github/workflows/lint.yml | 2 +- .../db-dtypes/.github/workflows/unittest.yml | 4 +- packages/db-dtypes/.kokoro/publish-docs.sh | 4 +- packages/db-dtypes/.kokoro/release.sh | 5 +- packages/db-dtypes/.kokoro/requirements.in | 8 + packages/db-dtypes/.kokoro/requirements.txt | 472 ++++++++++++++++++ packages/db-dtypes/noxfile.py | 40 +- packages/db-dtypes/renovate.json | 2 +- 10 files changed, 512 insertions(+), 32 deletions(-) create mode 100644 packages/db-dtypes/.kokoro/requirements.in create mode 100644 packages/db-dtypes/.kokoro/requirements.txt diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 1ce608523524..2fa0f7c4fe15 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,5 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:e7bb19d47c13839fe8c147e50e02e8b6cf5da8edd1af8b82208cd6f66cc2829c -# created: 2022-07-05T18:31:20.838186805Z + digest: sha256:94961fdc5c9ca6d13530a6a414a49d2f607203168215d074cdb0a1df9ec31c0b diff --git a/packages/db-dtypes/.github/workflows/docs.yml b/packages/db-dtypes/.github/workflows/docs.yml index b46d7305d8cf..7092a139aed3 100644 --- a/packages/db-dtypes/.github/workflows/docs.yml +++ b/packages/db-dtypes/.github/workflows/docs.yml @@ -10,7 +10,7 @@ jobs: - name: Checkout uses: actions/checkout@v3 - name: Setup Python - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: "3.10" - name: Install nox @@ -26,7 +26,7 @@ jobs: - name: Checkout uses: actions/checkout@v3 - name: Setup Python - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: "3.10" - name: Install nox diff --git a/packages/db-dtypes/.github/workflows/lint.yml b/packages/db-dtypes/.github/workflows/lint.yml index f512a4960beb..d2aee5b7d8ec 100644 --- a/packages/db-dtypes/.github/workflows/lint.yml +++ b/packages/db-dtypes/.github/workflows/lint.yml @@ -10,7 +10,7 @@ jobs: - name: Checkout uses: actions/checkout@v3 - name: Setup Python - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: "3.10" - name: Install nox diff --git a/packages/db-dtypes/.github/workflows/unittest.yml b/packages/db-dtypes/.github/workflows/unittest.yml index 5531b0141297..87ade4d54362 100644 --- a/packages/db-dtypes/.github/workflows/unittest.yml +++ b/packages/db-dtypes/.github/workflows/unittest.yml @@ -13,7 +13,7 @@ jobs: - name: Checkout uses: actions/checkout@v3 - name: Setup Python - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python }} - name: Install nox @@ -39,7 +39,7 @@ jobs: - name: Checkout uses: actions/checkout@v3 - name: Setup Python - uses: actions/setup-python@v3 + uses: 
actions/setup-python@v4 with: python-version: "3.10" - name: Install coverage diff --git a/packages/db-dtypes/.kokoro/publish-docs.sh b/packages/db-dtypes/.kokoro/publish-docs.sh index 8acb14e802b0..1c4d62370042 100755 --- a/packages/db-dtypes/.kokoro/publish-docs.sh +++ b/packages/db-dtypes/.kokoro/publish-docs.sh @@ -21,14 +21,12 @@ export PYTHONUNBUFFERED=1 export PATH="${HOME}/.local/bin:${PATH}" # Install nox -python3 -m pip install --user --upgrade --quiet nox +python3 -m pip install --require-hashes -r .kokoro/requirements.txt python3 -m nox --version # build docs nox -s docs -python3 -m pip install --user gcp-docuploader - # create metadata python3 -m docuploader create-metadata \ --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ diff --git a/packages/db-dtypes/.kokoro/release.sh b/packages/db-dtypes/.kokoro/release.sh index aa6052b514ee..2cbba13c8091 100755 --- a/packages/db-dtypes/.kokoro/release.sh +++ b/packages/db-dtypes/.kokoro/release.sh @@ -16,12 +16,9 @@ set -eo pipefail # Start the releasetool reporter -python3 -m pip install gcp-releasetool +python3 -m pip install --require-hashes -r github/python-db-dtypes-pandas/.kokoro/requirements.txt python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script -# Ensure that we have the latest versions of Twine, Wheel, and Setuptools. -python3 -m pip install --upgrade twine wheel setuptools - # Disable buffering, so that the logs stream through. 
export PYTHONUNBUFFERED=1 diff --git a/packages/db-dtypes/.kokoro/requirements.in b/packages/db-dtypes/.kokoro/requirements.in new file mode 100644 index 000000000000..7718391a34d7 --- /dev/null +++ b/packages/db-dtypes/.kokoro/requirements.in @@ -0,0 +1,8 @@ +gcp-docuploader +gcp-releasetool +importlib-metadata +typing-extensions +twine +wheel +setuptools +nox \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/requirements.txt b/packages/db-dtypes/.kokoro/requirements.txt new file mode 100644 index 000000000000..385f2d4d6106 --- /dev/null +++ b/packages/db-dtypes/.kokoro/requirements.txt @@ -0,0 +1,472 @@ +# +# This file is autogenerated by pip-compile with python 3.10 +# To update, run: +# +# pip-compile --allow-unsafe --generate-hashes requirements.in +# +argcomplete==2.0.0 \ + --hash=sha256:6372ad78c89d662035101418ae253668445b391755cfe94ea52f1b9d22425b20 \ + --hash=sha256:cffa11ea77999bb0dd27bb25ff6dc142a6796142f68d45b1a26b11f58724561e + # via nox +attrs==22.1.0 \ + --hash=sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6 \ + --hash=sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c + # via gcp-releasetool +bleach==5.0.1 \ + --hash=sha256:085f7f33c15bd408dd9b17a4ad77c577db66d76203e5984b1bd59baeee948b2a \ + --hash=sha256:0d03255c47eb9bd2f26aa9bb7f2107732e7e8fe195ca2f64709fcf3b0a4a085c + # via readme-renderer +cachetools==5.2.0 \ + --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \ + --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db + # via google-auth +certifi==2022.6.15 \ + --hash=sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d \ + --hash=sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412 + # via requests +cffi==1.15.1 \ + --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ + --hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \ + 
--hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \ + --hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \ + --hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \ + --hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \ + --hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \ + --hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \ + --hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \ + --hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \ + --hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \ + --hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \ + --hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \ + --hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \ + --hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \ + --hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \ + --hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \ + --hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \ + --hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \ + --hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \ + --hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \ + --hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \ + --hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \ + --hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \ + --hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \ + --hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \ + 
--hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \ + --hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \ + --hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \ + --hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \ + --hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \ + --hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \ + --hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \ + --hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \ + --hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \ + --hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \ + --hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \ + --hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \ + --hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \ + --hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \ + --hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \ + --hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \ + --hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \ + --hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \ + --hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \ + --hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \ + --hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \ + --hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \ + --hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \ + --hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \ + 
--hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \ + --hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \ + --hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \ + --hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \ + --hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \ + --hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \ + --hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \ + --hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \ + --hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \ + --hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \ + --hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \ + --hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \ + --hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \ + --hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0 + # via cryptography +charset-normalizer==2.1.1 \ + --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ + --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f + # via requests +click==8.0.4 \ + --hash=sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1 \ + --hash=sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb + # via + # gcp-docuploader + # gcp-releasetool +colorlog==6.7.0 \ + --hash=sha256:0d33ca236784a1ba3ff9c532d4964126d8a2c44f1f0cb1d2b0728196f512f662 \ + --hash=sha256:bd94bd21c1e13fac7bd3153f4bc3a7dc0eb0974b8bc2fdf1a989e474f6e582e5 + # via + # gcp-docuploader + # nox +commonmark==0.9.1 \ + --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ + 
--hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 + # via rich +cryptography==37.0.4 \ + --hash=sha256:190f82f3e87033821828f60787cfa42bff98404483577b591429ed99bed39d59 \ + --hash=sha256:2be53f9f5505673eeda5f2736bea736c40f051a739bfae2f92d18aed1eb54596 \ + --hash=sha256:30788e070800fec9bbcf9faa71ea6d8068f5136f60029759fd8c3efec3c9dcb3 \ + --hash=sha256:3d41b965b3380f10e4611dbae366f6dc3cefc7c9ac4e8842a806b9672ae9add5 \ + --hash=sha256:4c590ec31550a724ef893c50f9a97a0c14e9c851c85621c5650d699a7b88f7ab \ + --hash=sha256:549153378611c0cca1042f20fd9c5030d37a72f634c9326e225c9f666d472884 \ + --hash=sha256:63f9c17c0e2474ccbebc9302ce2f07b55b3b3fcb211ded18a42d5764f5c10a82 \ + --hash=sha256:6bc95ed67b6741b2607298f9ea4932ff157e570ef456ef7ff0ef4884a134cc4b \ + --hash=sha256:7099a8d55cd49b737ffc99c17de504f2257e3787e02abe6d1a6d136574873441 \ + --hash=sha256:75976c217f10d48a8b5a8de3d70c454c249e4b91851f6838a4e48b8f41eb71aa \ + --hash=sha256:7bc997818309f56c0038a33b8da5c0bfbb3f1f067f315f9abd6fc07ad359398d \ + --hash=sha256:80f49023dd13ba35f7c34072fa17f604d2f19bf0989f292cedf7ab5770b87a0b \ + --hash=sha256:91ce48d35f4e3d3f1d83e29ef4a9267246e6a3be51864a5b7d2247d5086fa99a \ + --hash=sha256:a958c52505c8adf0d3822703078580d2c0456dd1d27fabfb6f76fe63d2971cd6 \ + --hash=sha256:b62439d7cd1222f3da897e9a9fe53bbf5c104fff4d60893ad1355d4c14a24157 \ + --hash=sha256:b7f8dd0d4c1f21759695c05a5ec8536c12f31611541f8904083f3dc582604280 \ + --hash=sha256:d204833f3c8a33bbe11eda63a54b1aad7aa7456ed769a982f21ec599ba5fa282 \ + --hash=sha256:e007f052ed10cc316df59bc90fbb7ff7950d7e2919c9757fd42a2b8ecf8a5f67 \ + --hash=sha256:f2dcb0b3b63afb6df7fd94ec6fbddac81b5492513f7b0436210d390c14d46ee8 \ + --hash=sha256:f721d1885ecae9078c3f6bbe8a88bc0786b6e749bf32ccec1ef2b18929a05046 \ + --hash=sha256:f7a6de3e98771e183645181b3627e2563dcde3ce94a9e42a3f427d2255190327 \ + --hash=sha256:f8c0a6e9e1dd3eb0414ba320f85da6b0dcbd543126e30fcc546e7372a7fbf3b9 + # via + # gcp-releasetool + # secretstorage 
+distlib==0.3.6 \ + --hash=sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46 \ + --hash=sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e + # via virtualenv +docutils==0.19 \ + --hash=sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6 \ + --hash=sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc + # via readme-renderer +filelock==3.8.0 \ + --hash=sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc \ + --hash=sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4 + # via virtualenv +gcp-docuploader==0.6.3 \ + --hash=sha256:ba8c9d76b3bbac54b0311c503a373b00edc2dc02d6d54ea9507045adb8e870f7 \ + --hash=sha256:c0f5aaa82ce1854a386197e4e359b120ad6d4e57ae2c812fce42219a3288026b + # via -r requirements.in +gcp-releasetool==1.8.7 \ + --hash=sha256:3d2a67c9db39322194afb3b427e9cb0476ce8f2a04033695f0aeb63979fc2b37 \ + --hash=sha256:5e4d28f66e90780d77f3ecf1e9155852b0c3b13cbccb08ab07e66b2357c8da8d + # via -r requirements.in +google-api-core==2.8.2 \ + --hash=sha256:06f7244c640322b508b125903bb5701bebabce8832f85aba9335ec00b3d02edc \ + --hash=sha256:93c6a91ccac79079ac6bbf8b74ee75db970cc899278b97d53bc012f35908cf50 + # via + # google-cloud-core + # google-cloud-storage +google-auth==2.11.0 \ + --hash=sha256:be62acaae38d0049c21ca90f27a23847245c9f161ff54ede13af2cb6afecbac9 \ + --hash=sha256:ed65ecf9f681832298e29328e1ef0a3676e3732b2e56f41532d45f70a22de0fb + # via + # gcp-releasetool + # google-api-core + # google-cloud-core + # google-cloud-storage +google-cloud-core==2.3.2 \ + --hash=sha256:8417acf6466be2fa85123441696c4badda48db314c607cf1e5d543fa8bdc22fe \ + --hash=sha256:b9529ee7047fd8d4bf4a2182de619154240df17fbe60ead399078c1ae152af9a + # via google-cloud-storage +google-cloud-storage==2.5.0 \ + --hash=sha256:19a26c66c317ce542cea0830b7e787e8dac2588b6bfa4d3fd3b871ba16305ab0 \ + --hash=sha256:382f34b91de2212e3c2e7b40ec079d27ee2e3dbbae99b75b1bcd8c63063ce235 + # via 
gcp-docuploader +google-crc32c==1.3.0 \ + --hash=sha256:04e7c220798a72fd0f08242bc8d7a05986b2a08a0573396187fd32c1dcdd58b3 \ + --hash=sha256:05340b60bf05b574159e9bd940152a47d38af3fb43803ffe71f11d704b7696a6 \ + --hash=sha256:12674a4c3b56b706153a358eaa1018c4137a5a04635b92b4652440d3d7386206 \ + --hash=sha256:127f9cc3ac41b6a859bd9dc4321097b1a4f6aa7fdf71b4f9227b9e3ebffb4422 \ + --hash=sha256:13af315c3a0eec8bb8b8d80b8b128cb3fcd17d7e4edafc39647846345a3f003a \ + --hash=sha256:1926fd8de0acb9d15ee757175ce7242e235482a783cd4ec711cc999fc103c24e \ + --hash=sha256:226f2f9b8e128a6ca6a9af9b9e8384f7b53a801907425c9a292553a3a7218ce0 \ + --hash=sha256:276de6273eb074a35bc598f8efbc00c7869c5cf2e29c90748fccc8c898c244df \ + --hash=sha256:318f73f5484b5671f0c7f5f63741ab020a599504ed81d209b5c7129ee4667407 \ + --hash=sha256:3bbce1be3687bbfebe29abdb7631b83e6b25da3f4e1856a1611eb21854b689ea \ + --hash=sha256:42ae4781333e331a1743445931b08ebdad73e188fd554259e772556fc4937c48 \ + --hash=sha256:58be56ae0529c664cc04a9c76e68bb92b091e0194d6e3c50bea7e0f266f73713 \ + --hash=sha256:5da2c81575cc3ccf05d9830f9e8d3c70954819ca9a63828210498c0774fda1a3 \ + --hash=sha256:6311853aa2bba4064d0c28ca54e7b50c4d48e3de04f6770f6c60ebda1e975267 \ + --hash=sha256:650e2917660e696041ab3dcd7abac160b4121cd9a484c08406f24c5964099829 \ + --hash=sha256:6a4db36f9721fdf391646685ecffa404eb986cbe007a3289499020daf72e88a2 \ + --hash=sha256:779cbf1ce375b96111db98fca913c1f5ec11b1d870e529b1dc7354b2681a8c3a \ + --hash=sha256:7f6fe42536d9dcd3e2ffb9d3053f5d05221ae3bbcefbe472bdf2c71c793e3183 \ + --hash=sha256:891f712ce54e0d631370e1f4997b3f182f3368179198efc30d477c75d1f44942 \ + --hash=sha256:95c68a4b9b7828ba0428f8f7e3109c5d476ca44996ed9a5f8aac6269296e2d59 \ + --hash=sha256:96a8918a78d5d64e07c8ea4ed2bc44354e3f93f46a4866a40e8db934e4c0d74b \ + --hash=sha256:9c3cf890c3c0ecfe1510a452a165431b5831e24160c5fcf2071f0f85ca5a47cd \ + --hash=sha256:9f58099ad7affc0754ae42e6d87443299f15d739b0ce03c76f515153a5cda06c \ + 
--hash=sha256:a0b9e622c3b2b8d0ce32f77eba617ab0d6768b82836391e4f8f9e2074582bf02 \ + --hash=sha256:a7f9cbea4245ee36190f85fe1814e2d7b1e5f2186381b082f5d59f99b7f11328 \ + --hash=sha256:bab4aebd525218bab4ee615786c4581952eadc16b1ff031813a2fd51f0cc7b08 \ + --hash=sha256:c124b8c8779bf2d35d9b721e52d4adb41c9bfbde45e6a3f25f0820caa9aba73f \ + --hash=sha256:c9da0a39b53d2fab3e5467329ed50e951eb91386e9d0d5b12daf593973c3b168 \ + --hash=sha256:ca60076c388728d3b6ac3846842474f4250c91efbfe5afa872d3ffd69dd4b318 \ + --hash=sha256:cb6994fff247987c66a8a4e550ef374671c2b82e3c0d2115e689d21e511a652d \ + --hash=sha256:d1c1d6236feab51200272d79b3d3e0f12cf2cbb12b208c835b175a21efdb0a73 \ + --hash=sha256:dd7760a88a8d3d705ff562aa93f8445ead54f58fd482e4f9e2bafb7e177375d4 \ + --hash=sha256:dda4d8a3bb0b50f540f6ff4b6033f3a74e8bf0bd5320b70fab2c03e512a62812 \ + --hash=sha256:e0f1ff55dde0ebcfbef027edc21f71c205845585fffe30d4ec4979416613e9b3 \ + --hash=sha256:e7a539b9be7b9c00f11ef16b55486141bc2cdb0c54762f84e3c6fc091917436d \ + --hash=sha256:eb0b14523758e37802f27b7f8cd973f5f3d33be7613952c0df904b68c4842f0e \ + --hash=sha256:ed447680ff21c14aaceb6a9f99a5f639f583ccfe4ce1a5e1d48eb41c3d6b3217 \ + --hash=sha256:f52a4ad2568314ee713715b1e2d79ab55fab11e8b304fd1462ff5cccf4264b3e \ + --hash=sha256:fbd60c6aaa07c31d7754edbc2334aef50601b7f1ada67a96eb1eb57c7c72378f \ + --hash=sha256:fc28e0db232c62ca0c3600884933178f0825c99be4474cdd645e378a10588125 \ + --hash=sha256:fe31de3002e7b08eb20823b3735b97c86c5926dd0581c7710a680b418a8709d4 \ + --hash=sha256:fec221a051150eeddfdfcff162e6db92c65ecf46cb0f7bb1bf812a1520ec026b \ + --hash=sha256:ff71073ebf0e42258a42a0b34f2c09ec384977e7f6808999102eedd5b49920e3 + # via google-resumable-media +google-resumable-media==2.3.3 \ + --hash=sha256:27c52620bd364d1c8116eaac4ea2afcbfb81ae9139fb3199652fcac1724bfb6c \ + --hash=sha256:5b52774ea7a829a8cdaa8bd2d4c3d4bc660c91b30857ab2668d0eb830f4ea8c5 + # via google-cloud-storage +googleapis-common-protos==1.56.4 \ + 
--hash=sha256:8eb2cbc91b69feaf23e32452a7ae60e791e09967d81d4fcc7fc388182d1bd394 \ + --hash=sha256:c25873c47279387cfdcbdafa36149887901d36202cb645a0e4f29686bf6e4417 + # via google-api-core +idna==3.3 \ + --hash=sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff \ + --hash=sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d + # via requests +importlib-metadata==4.12.0 \ + --hash=sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670 \ + --hash=sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23 + # via + # -r requirements.in + # twine +jaraco-classes==3.2.2 \ + --hash=sha256:6745f113b0b588239ceb49532aa09c3ebb947433ce311ef2f8e3ad64ebb74594 \ + --hash=sha256:e6ef6fd3fcf4579a7a019d87d1e56a883f4e4c35cfe925f86731abc58804e647 + # via keyring +jeepney==0.8.0 \ + --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ + --hash=sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755 + # via + # keyring + # secretstorage +jinja2==3.1.2 \ + --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ + --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 + # via gcp-releasetool +keyring==23.9.0 \ + --hash=sha256:4c32a31174faaee48f43a7e2c7e9c3216ec5e95acf22a2bebfb4a1d05056ee44 \ + --hash=sha256:98f060ec95ada2ab910c195a2d4317be6ef87936a766b239c46aa3c7aac4f0db + # via + # gcp-releasetool + # twine +markupsafe==2.1.1 \ + --hash=sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003 \ + --hash=sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88 \ + --hash=sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5 \ + --hash=sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7 \ + --hash=sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a \ + --hash=sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603 \ + 
--hash=sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1 \ + --hash=sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135 \ + --hash=sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247 \ + --hash=sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6 \ + --hash=sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601 \ + --hash=sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77 \ + --hash=sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02 \ + --hash=sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e \ + --hash=sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63 \ + --hash=sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f \ + --hash=sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980 \ + --hash=sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b \ + --hash=sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812 \ + --hash=sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff \ + --hash=sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96 \ + --hash=sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1 \ + --hash=sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925 \ + --hash=sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a \ + --hash=sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6 \ + --hash=sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e \ + --hash=sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f \ + --hash=sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4 \ + --hash=sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f \ + --hash=sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3 \ + 
--hash=sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c \ + --hash=sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a \ + --hash=sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417 \ + --hash=sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a \ + --hash=sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a \ + --hash=sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37 \ + --hash=sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452 \ + --hash=sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933 \ + --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \ + --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7 + # via jinja2 +more-itertools==8.14.0 \ + --hash=sha256:1bc4f91ee5b1b31ac7ceacc17c09befe6a40a503907baf9c839c229b5095cfd2 \ + --hash=sha256:c09443cd3d5438b8dafccd867a6bc1cb0894389e90cb53d227456b0b0bccb750 + # via jaraco-classes +nox==2022.8.7 \ + --hash=sha256:1b894940551dc5c389f9271d197ca5d655d40bdc6ccf93ed6880e4042760a34b \ + --hash=sha256:96cca88779e08282a699d672258ec01eb7c792d35bbbf538c723172bce23212c + # via -r requirements.in +packaging==21.3 \ + --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \ + --hash=sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522 + # via + # gcp-releasetool + # nox +pkginfo==1.8.3 \ + --hash=sha256:848865108ec99d4901b2f7e84058b6e7660aae8ae10164e015a6dcf5b242a594 \ + --hash=sha256:a84da4318dd86f870a9447a8c98340aa06216bfc6f2b7bdc4b8766984ae1867c + # via twine +platformdirs==2.5.2 \ + --hash=sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788 \ + --hash=sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19 + # via virtualenv +protobuf==3.20.1 \ + --hash=sha256:06059eb6953ff01e56a25cd02cca1a9649a75a7e65397b5b9b4e929ed71d10cf \ + 
--hash=sha256:097c5d8a9808302fb0da7e20edf0b8d4703274d140fd25c5edabddcde43e081f \ + --hash=sha256:284f86a6207c897542d7e956eb243a36bb8f9564c1742b253462386e96c6b78f \ + --hash=sha256:32ca378605b41fd180dfe4e14d3226386d8d1b002ab31c969c366549e66a2bb7 \ + --hash=sha256:3cc797c9d15d7689ed507b165cd05913acb992d78b379f6014e013f9ecb20996 \ + --hash=sha256:62f1b5c4cd6c5402b4e2d63804ba49a327e0c386c99b1675c8a0fefda23b2067 \ + --hash=sha256:69ccfdf3657ba59569c64295b7d51325f91af586f8d5793b734260dfe2e94e2c \ + --hash=sha256:6f50601512a3d23625d8a85b1638d914a0970f17920ff39cec63aaef80a93fb7 \ + --hash=sha256:7403941f6d0992d40161aa8bb23e12575637008a5a02283a930addc0508982f9 \ + --hash=sha256:755f3aee41354ae395e104d62119cb223339a8f3276a0cd009ffabfcdd46bb0c \ + --hash=sha256:77053d28427a29987ca9caf7b72ccafee011257561259faba8dd308fda9a8739 \ + --hash=sha256:7e371f10abe57cee5021797126c93479f59fccc9693dafd6bd5633ab67808a91 \ + --hash=sha256:9016d01c91e8e625141d24ec1b20fed584703e527d28512aa8c8707f105a683c \ + --hash=sha256:9be73ad47579abc26c12024239d3540e6b765182a91dbc88e23658ab71767153 \ + --hash=sha256:adc31566d027f45efe3f44eeb5b1f329da43891634d61c75a5944e9be6dd42c9 \ + --hash=sha256:adfc6cf69c7f8c50fd24c793964eef18f0ac321315439d94945820612849c388 \ + --hash=sha256:af0ebadc74e281a517141daad9d0f2c5d93ab78e9d455113719a45a49da9db4e \ + --hash=sha256:cb29edb9eab15742d791e1025dd7b6a8f6fcb53802ad2f6e3adcb102051063ab \ + --hash=sha256:cd68be2559e2a3b84f517fb029ee611546f7812b1fdd0aa2ecc9bc6ec0e4fdde \ + --hash=sha256:cdee09140e1cd184ba9324ec1df410e7147242b94b5f8b0c64fc89e38a8ba531 \ + --hash=sha256:db977c4ca738dd9ce508557d4fce0f5aebd105e158c725beec86feb1f6bc20d8 \ + --hash=sha256:dd5789b2948ca702c17027c84c2accb552fc30f4622a98ab5c51fcfe8c50d3e7 \ + --hash=sha256:e250a42f15bf9d5b09fe1b293bdba2801cd520a9f5ea2d7fb7536d4441811d20 \ + --hash=sha256:ff8d8fa42675249bb456f5db06c00de6c2f4c27a065955917b28c4f15978b9c3 + # via + # gcp-docuploader + # gcp-releasetool + # google-api-core +py==1.11.0 \ + 
--hash=sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719 \ + --hash=sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378 + # via nox +pyasn1==0.4.8 \ + --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \ + --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba + # via + # pyasn1-modules + # rsa +pyasn1-modules==0.2.8 \ + --hash=sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e \ + --hash=sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74 + # via google-auth +pycparser==2.21 \ + --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ + --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 + # via cffi +pygments==2.13.0 \ + --hash=sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1 \ + --hash=sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42 + # via + # readme-renderer + # rich +pyjwt==2.4.0 \ + --hash=sha256:72d1d253f32dbd4f5c88eaf1fdc62f3a19f676ccbadb9dbc5d07e951b2b26daf \ + --hash=sha256:d42908208c699b3b973cbeb01a969ba6a96c821eefb1c5bfe4c390c01d67abba + # via gcp-releasetool +pyparsing==3.0.9 \ + --hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \ + --hash=sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc + # via packaging +pyperclip==1.8.2 \ + --hash=sha256:105254a8b04934f0bc84e9c24eb360a591aaf6535c9def5f29d92af107a9bf57 + # via gcp-releasetool +python-dateutil==2.8.2 \ + --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ + --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 + # via gcp-releasetool +readme-renderer==37.0 \ + --hash=sha256:07b7ea234e03e58f77cc222e206e6abb8f4c0435becce5104794ee591f9301c5 \ + --hash=sha256:9fa416704703e509eeb900696751c908ddeb2011319d93700d8f18baff887a69 + # via twine +requests==2.28.1 \ + 
--hash=sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983 \ + --hash=sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349 + # via + # gcp-releasetool + # google-api-core + # google-cloud-storage + # requests-toolbelt + # twine +requests-toolbelt==0.9.1 \ + --hash=sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f \ + --hash=sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0 + # via twine +rfc3986==2.0.0 \ + --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ + --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c + # via twine +rich==12.5.1 \ + --hash=sha256:2eb4e6894cde1e017976d2975ac210ef515d7548bc595ba20e195fb9628acdeb \ + --hash=sha256:63a5c5ce3673d3d5fbbf23cd87e11ab84b6b451436f1b7f19ec54b6bc36ed7ca + # via twine +rsa==4.9 \ + --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ + --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 + # via google-auth +secretstorage==3.3.3 \ + --hash=sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77 \ + --hash=sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99 + # via keyring +six==1.16.0 \ + --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ + --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 + # via + # bleach + # gcp-docuploader + # google-auth + # python-dateutil +twine==4.0.1 \ + --hash=sha256:42026c18e394eac3e06693ee52010baa5313e4811d5a11050e7d48436cf41b9e \ + --hash=sha256:96b1cf12f7ae611a4a40b6ae8e9570215daff0611828f5fe1f37a16255ab24a0 + # via -r requirements.in +typing-extensions==4.3.0 \ + --hash=sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02 \ + --hash=sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6 + # via -r requirements.in +urllib3==1.26.12 \ + 
--hash=sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e \ + --hash=sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997 + # via + # requests + # twine +virtualenv==20.16.4 \ + --hash=sha256:014f766e4134d0008dcaa1f95bafa0fb0f575795d07cae50b1bee514185d6782 \ + --hash=sha256:035ed57acce4ac35c82c9d8802202b0e71adac011a511ff650cbcf9635006a22 + # via nox +webencodings==0.5.1 \ + --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \ + --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923 + # via bleach +wheel==0.37.1 \ + --hash=sha256:4bdcd7d840138086126cd09254dc6195fb4fc6f01c050a1d7236f2630db1d22a \ + --hash=sha256:e9a504e793efbca1b8e0e9cb979a249cf4a0a7b5b8c9e8b65a5e39d49529c1c4 + # via -r requirements.in +zipp==3.8.1 \ + --hash=sha256:05b45f1ee8f807d0cc928485ca40a07cb491cf092ff587c0df9cb1fd154848d2 \ + --hash=sha256:47c40d7fe183a6f21403a199b3e4192cca5774656965b0a4988ad2f8feb5f009 + # via importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +setuptools==65.2.0 \ + --hash=sha256:7f4bc85450898a09f76ebf28b72fa25bc7111f6c7d665d514a60bba9c75ef2a9 \ + --hash=sha256:a3ca5857c89f82f5c9410e8508cb32f4872a3bafd4aa7ae122a24ca33bccc750 + # via -r requirements.in diff --git a/packages/db-dtypes/noxfile.py b/packages/db-dtypes/noxfile.py index a6ef0a7ed27e..3717f13b29f2 100644 --- a/packages/db-dtypes/noxfile.py +++ b/packages/db-dtypes/noxfile.py @@ -285,7 +285,9 @@ def unit_prerelease(session): def install_systemtest_dependencies(session, *constraints): # Use pre-release gRPC for system tests. - session.install("--pre", "grpcio") + # Exclude version 1.49.0rc1 which has a known issue. 
+ # See https://github.com/grpc/grpc/pull/30642 + session.install("--pre", "grpcio!=1.49.0rc1") session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) @@ -430,7 +432,8 @@ def prerelease_deps(session): # Install all dependencies session.install("-e", ".[all, tests, tracing]") - session.install(*UNIT_TEST_STANDARD_DEPENDENCIES) + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) system_deps_all = ( SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES @@ -459,18 +462,13 @@ def prerelease_deps(session): session.install(*constraints_deps) - if os.path.exists("samples/snippets/requirements.txt"): - session.install("-r", "samples/snippets/requirements.txt") - - if os.path.exists("samples/snippets/requirements-test.txt"): - session.install("-r", "samples/snippets/requirements-test.txt") - prerel_deps = [ "protobuf", # dependency of grpc "six", "googleapis-common-protos", - "grpcio", + # Exclude version 1.49.0rc1 which has a known issue. See https://github.com/grpc/grpc/pull/30642 + "grpcio!=1.49.0rc1", "grpcio-status", "google-api-core", "proto-plus", @@ -501,11 +499,19 @@ def prerelease_deps(session): system_test_folder_path = os.path.join("tests", "system") # Only run system tests if found. - if os.path.exists(system_test_path) or os.path.exists(system_test_folder_path): - session.run("py.test", "tests/system") - - snippets_test_path = os.path.join("samples", "snippets") - - # Only run samples tests if found. 
- if os.path.exists(snippets_test_path): - session.run("py.test", "samples/snippets") + if os.path.exists(system_test_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if os.path.exists(system_test_folder_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) diff --git a/packages/db-dtypes/renovate.json b/packages/db-dtypes/renovate.json index c21036d385e5..566a70f3cc3c 100644 --- a/packages/db-dtypes/renovate.json +++ b/packages/db-dtypes/renovate.json @@ -5,7 +5,7 @@ ":preserveSemverRanges", ":disableDependencyDashboard" ], - "ignorePaths": [".pre-commit-config.yaml"], + "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt"], "pip_requirements": { "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] } From e2955e0e197cdf17d1961e8456f4548ef41e8536 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 2 Sep 2022 15:39:03 -0400 Subject: [PATCH 082/210] chore(python): exclude setup.py in renovate config (#143) Source-Link: https://github.com/googleapis/synthtool/commit/56da63e80c384a871356d1ea6640802017f213b4 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:993a058718e84a82fda04c3177e58f0a43281a996c7c395e0a56ccc4d6d210d7 Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 2 +- packages/db-dtypes/renovate.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 2fa0f7c4fe15..b8dcb4a4af99 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:94961fdc5c9ca6d13530a6a414a49d2f607203168215d074cdb0a1df9ec31c0b + digest: sha256:993a058718e84a82fda04c3177e58f0a43281a996c7c395e0a56ccc4d6d210d7 diff --git a/packages/db-dtypes/renovate.json b/packages/db-dtypes/renovate.json index 566a70f3cc3c..39b2a0ec9296 100644 --- a/packages/db-dtypes/renovate.json +++ b/packages/db-dtypes/renovate.json @@ -5,7 +5,7 @@ ":preserveSemverRanges", ":disableDependencyDashboard" ], - "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt"], + "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"], "pip_requirements": { "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] } From 4b39b221aca05c78f0a03e722ae76b0a3e3a69a7 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 6 Sep 2022 18:28:07 +0200 Subject: [PATCH 083/210] chore(deps): update dependency pytest to v7.1.3 (#144) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- packages/db-dtypes/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/db-dtypes/samples/snippets/requirements-test.txt b/packages/db-dtypes/samples/snippets/requirements-test.txt index d00689e0623a..e07168502ea9 100644 --- a/packages/db-dtypes/samples/snippets/requirements-test.txt +++ b/packages/db-dtypes/samples/snippets/requirements-test.txt @@ -1 +1 @@ -pytest==7.1.2 +pytest==7.1.3 From 0a9eff7072c5b2a48d29dc3466adda7327d29928 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 13 Sep 2022 12:40:42 -0400 Subject: 
[PATCH 084/210] chore: detect samples tests in nested directories (#146) Source-Link: https://github.com/googleapis/synthtool/commit/50db768f450a50d7c1fd62513c113c9bb96fd434 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:e09366bdf0fd9c8976592988390b24d53583dd9f002d476934da43725adbb978 Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 2 +- packages/db-dtypes/samples/snippets/noxfile.py | 6 ++++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index b8dcb4a4af99..aa547962eb0a 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:993a058718e84a82fda04c3177e58f0a43281a996c7c395e0a56ccc4d6d210d7 + digest: sha256:e09366bdf0fd9c8976592988390b24d53583dd9f002d476934da43725adbb978 diff --git a/packages/db-dtypes/samples/snippets/noxfile.py b/packages/db-dtypes/samples/snippets/noxfile.py index 29b5bc852183..b053ca568f63 100644 --- a/packages/db-dtypes/samples/snippets/noxfile.py +++ b/packages/db-dtypes/samples/snippets/noxfile.py @@ -208,8 +208,10 @@ def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: # check for presence of tests - test_list = glob.glob("*_test.py") + glob.glob("test_*.py") - test_list.extend(glob.glob("tests")) + test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob( + "**/test_*.py", recursive=True + ) + test_list.extend(glob.glob("**/tests", recursive=True)) if len(test_list) == 0: print("No tests found, skipping directory.") From 213f50cd20627565916da9df8938032d563857d6 Mon Sep 17 00:00:00 2001 From: Jacob Hayes Date: Tue, 13 Sep 2022 18:58:05 +0200 Subject: [PATCH 085/210] fix(deps): Remove python version upper bound (#145) Co-authored-by: 
Anthonios Partheniou --- packages/db-dtypes/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/db-dtypes/setup.py b/packages/db-dtypes/setup.py index 7b4eb413a6a4..0c0a05ddec9b 100644 --- a/packages/db-dtypes/setup.py +++ b/packages/db-dtypes/setup.py @@ -71,6 +71,6 @@ def readme(): ], platforms="Posix; MacOS X; Windows", install_requires=dependencies, - python_requires=">=3.7, <3.11", + python_requires=">=3.7", tests_require=["pytest"], ) From 5598fb9b93e0e16b3874a5aad1f78b0b7c60c4ed Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Mon, 19 Sep 2022 11:21:15 -0400 Subject: [PATCH 086/210] fix: avoid out-of-range nanoseconds field in pandas 1.5.x (#148) --- packages/db-dtypes/db_dtypes/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/db-dtypes/db_dtypes/__init__.py b/packages/db-dtypes/db_dtypes/__init__.py index ca0b4680d2fa..2b51bcdbb2a0 100644 --- a/packages/db-dtypes/db_dtypes/__init__.py +++ b/packages/db-dtypes/db_dtypes/__init__.py @@ -150,7 +150,8 @@ def _datetime( hour=int(hour), minute=int(minute) if minute else 0, second=int(second) if second else 0, - nanosecond=nanosecond, + microsecond=nanosecond // 1000, + nanosecond=nanosecond % 1000, ).to_datetime64() else: raise TypeError("Invalid value type", scalar) From 59aa88e6901f0ca6baf9279a2b2de14e6897d4e8 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 20 Sep 2022 10:56:58 -0400 Subject: [PATCH 087/210] chore(main): release 1.0.4 (#147) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/db-dtypes/CHANGELOG.md | 8 ++++++++ packages/db-dtypes/db_dtypes/version.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/packages/db-dtypes/CHANGELOG.md b/packages/db-dtypes/CHANGELOG.md index 53db3c1bc4c3..8a9b05e758b3 100644 --- a/packages/db-dtypes/CHANGELOG.md +++ b/packages/db-dtypes/CHANGELOG.md @@ -1,5 
+1,13 @@ # Changelog +## [1.0.4](https://github.com/googleapis/python-db-dtypes-pandas/compare/v1.0.3...v1.0.4) (2022-09-19) + + +### Bug Fixes + +* Avoid out-of-range nanoseconds field in pandas 1.5.x ([#148](https://github.com/googleapis/python-db-dtypes-pandas/issues/148)) ([2a477ca](https://github.com/googleapis/python-db-dtypes-pandas/commit/2a477ca42033867fbf76f0a818677b04d4d66f8f)) +* **deps:** Remove python version upper bound ([#145](https://github.com/googleapis/python-db-dtypes-pandas/issues/145)) ([a361806](https://github.com/googleapis/python-db-dtypes-pandas/commit/a361806026b0358270d101e9eff362d08a971076)) + ## [1.0.3](https://github.com/googleapis/python-db-dtypes-pandas/compare/v1.0.2...v1.0.3) (2022-08-05) diff --git a/packages/db-dtypes/db_dtypes/version.py b/packages/db-dtypes/db_dtypes/version.py index e2a9429ca141..fa2fdbccf89e 100644 --- a/packages/db-dtypes/db_dtypes/version.py +++ b/packages/db-dtypes/db_dtypes/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "1.0.3" +__version__ = "1.0.4" From b6568d7d7df4a25ad45f11a9cbf8d5b83ddd3916 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 3 Oct 2022 11:42:32 -0400 Subject: [PATCH 088/210] chore: update dependency protobuf >= 3.20.2 (#149) Source-Link: https://github.com/googleapis/synthtool/commit/f58d3135a2fab20e225d98741dbc06d57459b816 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:7a40313731a7cb1454eef6b33d3446ebb121836738dc3ab3d2d3ded5268c35b6 Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 2 +- packages/db-dtypes/.kokoro/requirements.txt | 49 ++++++++++---------- 2 files changed, 25 insertions(+), 26 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index aa547962eb0a..3815c983cb16 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:e09366bdf0fd9c8976592988390b24d53583dd9f002d476934da43725adbb978 + digest: sha256:7a40313731a7cb1454eef6b33d3446ebb121836738dc3ab3d2d3ded5268c35b6 diff --git a/packages/db-dtypes/.kokoro/requirements.txt b/packages/db-dtypes/.kokoro/requirements.txt index 385f2d4d6106..d15994bac93c 100644 --- a/packages/db-dtypes/.kokoro/requirements.txt +++ b/packages/db-dtypes/.kokoro/requirements.txt @@ -325,31 +325,30 @@ platformdirs==2.5.2 \ --hash=sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788 \ --hash=sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19 # via virtualenv -protobuf==3.20.1 \ - --hash=sha256:06059eb6953ff01e56a25cd02cca1a9649a75a7e65397b5b9b4e929ed71d10cf \ - --hash=sha256:097c5d8a9808302fb0da7e20edf0b8d4703274d140fd25c5edabddcde43e081f \ - --hash=sha256:284f86a6207c897542d7e956eb243a36bb8f9564c1742b253462386e96c6b78f \ - --hash=sha256:32ca378605b41fd180dfe4e14d3226386d8d1b002ab31c969c366549e66a2bb7 \ - --hash=sha256:3cc797c9d15d7689ed507b165cd05913acb992d78b379f6014e013f9ecb20996 \ - --hash=sha256:62f1b5c4cd6c5402b4e2d63804ba49a327e0c386c99b1675c8a0fefda23b2067 \ - --hash=sha256:69ccfdf3657ba59569c64295b7d51325f91af586f8d5793b734260dfe2e94e2c \ - --hash=sha256:6f50601512a3d23625d8a85b1638d914a0970f17920ff39cec63aaef80a93fb7 \ - --hash=sha256:7403941f6d0992d40161aa8bb23e12575637008a5a02283a930addc0508982f9 \ - --hash=sha256:755f3aee41354ae395e104d62119cb223339a8f3276a0cd009ffabfcdd46bb0c \ - --hash=sha256:77053d28427a29987ca9caf7b72ccafee011257561259faba8dd308fda9a8739 \ - --hash=sha256:7e371f10abe57cee5021797126c93479f59fccc9693dafd6bd5633ab67808a91 \ - --hash=sha256:9016d01c91e8e625141d24ec1b20fed584703e527d28512aa8c8707f105a683c \ - --hash=sha256:9be73ad47579abc26c12024239d3540e6b765182a91dbc88e23658ab71767153 \ - --hash=sha256:adc31566d027f45efe3f44eeb5b1f329da43891634d61c75a5944e9be6dd42c9 \ - 
--hash=sha256:adfc6cf69c7f8c50fd24c793964eef18f0ac321315439d94945820612849c388 \ - --hash=sha256:af0ebadc74e281a517141daad9d0f2c5d93ab78e9d455113719a45a49da9db4e \ - --hash=sha256:cb29edb9eab15742d791e1025dd7b6a8f6fcb53802ad2f6e3adcb102051063ab \ - --hash=sha256:cd68be2559e2a3b84f517fb029ee611546f7812b1fdd0aa2ecc9bc6ec0e4fdde \ - --hash=sha256:cdee09140e1cd184ba9324ec1df410e7147242b94b5f8b0c64fc89e38a8ba531 \ - --hash=sha256:db977c4ca738dd9ce508557d4fce0f5aebd105e158c725beec86feb1f6bc20d8 \ - --hash=sha256:dd5789b2948ca702c17027c84c2accb552fc30f4622a98ab5c51fcfe8c50d3e7 \ - --hash=sha256:e250a42f15bf9d5b09fe1b293bdba2801cd520a9f5ea2d7fb7536d4441811d20 \ - --hash=sha256:ff8d8fa42675249bb456f5db06c00de6c2f4c27a065955917b28c4f15978b9c3 +protobuf==3.20.2 \ + --hash=sha256:03d76b7bd42ac4a6e109742a4edf81ffe26ffd87c5993126d894fe48a120396a \ + --hash=sha256:09e25909c4297d71d97612f04f41cea8fa8510096864f2835ad2f3b3df5a5559 \ + --hash=sha256:18e34a10ae10d458b027d7638a599c964b030c1739ebd035a1dfc0e22baa3bfe \ + --hash=sha256:291fb4307094bf5ccc29f424b42268640e00d5240bf0d9b86bf3079f7576474d \ + --hash=sha256:2c0b040d0b5d5d207936ca2d02f00f765906622c07d3fa19c23a16a8ca71873f \ + --hash=sha256:384164994727f274cc34b8abd41a9e7e0562801361ee77437099ff6dfedd024b \ + --hash=sha256:3cb608e5a0eb61b8e00fe641d9f0282cd0eedb603be372f91f163cbfbca0ded0 \ + --hash=sha256:5d9402bf27d11e37801d1743eada54372f986a372ec9679673bfcc5c60441151 \ + --hash=sha256:712dca319eee507a1e7df3591e639a2b112a2f4a62d40fe7832a16fd19151750 \ + --hash=sha256:7a5037af4e76c975b88c3becdf53922b5ffa3f2cddf657574a4920a3b33b80f3 \ + --hash=sha256:8228e56a865c27163d5d1d1771d94b98194aa6917bcfb6ce139cbfa8e3c27334 \ + --hash=sha256:84a1544252a933ef07bb0b5ef13afe7c36232a774affa673fc3636f7cee1db6c \ + --hash=sha256:84fe5953b18a383fd4495d375fe16e1e55e0a3afe7b4f7b4d01a3a0649fcda9d \ + --hash=sha256:9c673c8bfdf52f903081816b9e0e612186684f4eb4c17eeb729133022d6032e3 \ + 
--hash=sha256:9f876a69ca55aed879b43c295a328970306e8e80a263ec91cf6e9189243c613b \ + --hash=sha256:a9e5ae5a8e8985c67e8944c23035a0dff2c26b0f5070b2f55b217a1c33bbe8b1 \ + --hash=sha256:b4fdb29c5a7406e3f7ef176b2a7079baa68b5b854f364c21abe327bbeec01cdb \ + --hash=sha256:c184485e0dfba4dfd451c3bd348c2e685d6523543a0f91b9fd4ae90eb09e8422 \ + --hash=sha256:c9cdf251c582c16fd6a9f5e95836c90828d51b0069ad22f463761d27c6c19019 \ + --hash=sha256:e39cf61bb8582bda88cdfebc0db163b774e7e03364bbf9ce1ead13863e81e359 \ + --hash=sha256:e8fbc522303e09036c752a0afcc5c0603e917222d8bedc02813fd73b4b4ed804 \ + --hash=sha256:f34464ab1207114e73bba0794d1257c150a2b89b7a9faf504e00af7c9fd58978 \ + --hash=sha256:f52dabc96ca99ebd2169dadbe018824ebda08a795c7684a0b7d203a290f3adb0 # via # gcp-docuploader # gcp-releasetool From b78b3ad842beb4d2fc5271cc1a5b53f615059567 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 26 Oct 2022 12:53:16 +0200 Subject: [PATCH 089/210] chore(deps): update dependency pytest to v7.2.0 (#151) --- packages/db-dtypes/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/db-dtypes/samples/snippets/requirements-test.txt b/packages/db-dtypes/samples/snippets/requirements-test.txt index e07168502ea9..49780e035690 100644 --- a/packages/db-dtypes/samples/snippets/requirements-test.txt +++ b/packages/db-dtypes/samples/snippets/requirements-test.txt @@ -1 +1 @@ -pytest==7.1.3 +pytest==7.2.0 From 7ccd46a620196d89d8f53dbdbc29c8c736e651a3 Mon Sep 17 00:00:00 2001 From: Chalmer Lowe Date: Thu, 17 Nov 2022 12:17:39 -0500 Subject: [PATCH 090/210] chore: Removes upper bound for pyarrow (#155) --- packages/db-dtypes/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/db-dtypes/setup.py b/packages/db-dtypes/setup.py index 0c0a05ddec9b..ba6388fb920c 100644 --- a/packages/db-dtypes/setup.py +++ b/packages/db-dtypes/setup.py @@ -31,7 +31,7 @@ dependencies = [ "packaging >= 17.0", "pandas >= 0.24.2, < 
2.0dev", - "pyarrow>=3.0.0, <10.0dev", + "pyarrow>=3.0.0", "numpy >= 1.16.6, < 2.0dev", ] From 627eb0340ae62721143e59937a21ee100a357f07 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 19 Nov 2022 11:10:45 -0500 Subject: [PATCH 091/210] chore(python): update release script dependencies (#154) Source-Link: https://github.com/googleapis/synthtool/commit/25083af347468dd5f90f69627420f7d452b6c50e Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:e6cbd61f1838d9ff6a31436dfc13717f372a7482a82fc1863ca954ec47bff8c8 Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 2 +- packages/db-dtypes/.github/workflows/docs.yml | 4 +- packages/db-dtypes/.github/workflows/lint.yml | 2 +- .../db-dtypes/.github/workflows/unittest.yml | 2 +- .../db-dtypes/.kokoro/docker/docs/Dockerfile | 12 +- packages/db-dtypes/.kokoro/requirements.in | 4 +- packages/db-dtypes/.kokoro/requirements.txt | 354 ++++++++++-------- packages/db-dtypes/noxfile.py | 15 +- 8 files changed, 218 insertions(+), 177 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 3815c983cb16..3f1ccc085ef7 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:7a40313731a7cb1454eef6b33d3446ebb121836738dc3ab3d2d3ded5268c35b6 + digest: sha256:e6cbd61f1838d9ff6a31436dfc13717f372a7482a82fc1863ca954ec47bff8c8 diff --git a/packages/db-dtypes/.github/workflows/docs.yml b/packages/db-dtypes/.github/workflows/docs.yml index 7092a139aed3..e97d89e484c9 100644 --- a/packages/db-dtypes/.github/workflows/docs.yml +++ b/packages/db-dtypes/.github/workflows/docs.yml @@ -12,7 +12,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v4 with: - python-version: "3.10" + python-version: "3.9" - name: Install nox run: | python -m pip install --upgrade setuptools pip wheel @@ -28,7 +28,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v4 with: - python-version: "3.10" + python-version: "3.9" - name: Install nox run: | python -m pip install --upgrade setuptools pip wheel diff --git a/packages/db-dtypes/.github/workflows/lint.yml b/packages/db-dtypes/.github/workflows/lint.yml index d2aee5b7d8ec..16d5a9e90f6d 100644 --- a/packages/db-dtypes/.github/workflows/lint.yml +++ b/packages/db-dtypes/.github/workflows/lint.yml @@ -12,7 +12,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v4 with: - python-version: "3.10" + python-version: "3.8" - name: Install nox run: | python -m pip install --upgrade setuptools pip wheel diff --git a/packages/db-dtypes/.github/workflows/unittest.yml b/packages/db-dtypes/.github/workflows/unittest.yml index 87ade4d54362..23000c05d9d8 100644 --- a/packages/db-dtypes/.github/workflows/unittest.yml +++ b/packages/db-dtypes/.github/workflows/unittest.yml @@ -41,7 +41,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v4 with: - python-version: "3.10" + python-version: "3.8" - name: Install coverage run: | python -m pip install --upgrade setuptools pip wheel diff --git a/packages/db-dtypes/.kokoro/docker/docs/Dockerfile b/packages/db-dtypes/.kokoro/docker/docs/Dockerfile index 238b87b9d1c9..f8137d0ae497 
100644 --- a/packages/db-dtypes/.kokoro/docker/docs/Dockerfile +++ b/packages/db-dtypes/.kokoro/docker/docs/Dockerfile @@ -60,16 +60,16 @@ RUN apt-get update \ && rm -rf /var/lib/apt/lists/* \ && rm -f /var/cache/apt/archives/*.deb -###################### Install python 3.8.11 +###################### Install python 3.9.13 -# Download python 3.8.11 -RUN wget https://www.python.org/ftp/python/3.8.11/Python-3.8.11.tgz +# Download python 3.9.13 +RUN wget https://www.python.org/ftp/python/3.9.13/Python-3.9.13.tgz # Extract files -RUN tar -xvf Python-3.8.11.tgz +RUN tar -xvf Python-3.9.13.tgz -# Install python 3.8.11 -RUN ./Python-3.8.11/configure --enable-optimizations +# Install python 3.9.13 +RUN ./Python-3.9.13/configure --enable-optimizations RUN make altinstall ###################### Install pip diff --git a/packages/db-dtypes/.kokoro/requirements.in b/packages/db-dtypes/.kokoro/requirements.in index 7718391a34d7..cbd7e77f44db 100644 --- a/packages/db-dtypes/.kokoro/requirements.in +++ b/packages/db-dtypes/.kokoro/requirements.in @@ -5,4 +5,6 @@ typing-extensions twine wheel setuptools -nox \ No newline at end of file +nox +charset-normalizer<3 +click<8.1.0 diff --git a/packages/db-dtypes/.kokoro/requirements.txt b/packages/db-dtypes/.kokoro/requirements.txt index d15994bac93c..9c1b9be34e6b 100644 --- a/packages/db-dtypes/.kokoro/requirements.txt +++ b/packages/db-dtypes/.kokoro/requirements.txt @@ -20,9 +20,9 @@ cachetools==5.2.0 \ --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \ --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db # via google-auth -certifi==2022.6.15 \ - --hash=sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d \ - --hash=sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412 +certifi==2022.9.24 \ + --hash=sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14 \ + 
--hash=sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382 # via requests cffi==1.15.1 \ --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ @@ -93,11 +93,14 @@ cffi==1.15.1 \ charset-normalizer==2.1.1 \ --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f - # via requests + # via + # -r requirements.in + # requests click==8.0.4 \ --hash=sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1 \ --hash=sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb # via + # -r requirements.in # gcp-docuploader # gcp-releasetool colorlog==6.7.0 \ @@ -110,29 +113,33 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==37.0.4 \ - --hash=sha256:190f82f3e87033821828f60787cfa42bff98404483577b591429ed99bed39d59 \ - --hash=sha256:2be53f9f5505673eeda5f2736bea736c40f051a739bfae2f92d18aed1eb54596 \ - --hash=sha256:30788e070800fec9bbcf9faa71ea6d8068f5136f60029759fd8c3efec3c9dcb3 \ - --hash=sha256:3d41b965b3380f10e4611dbae366f6dc3cefc7c9ac4e8842a806b9672ae9add5 \ - --hash=sha256:4c590ec31550a724ef893c50f9a97a0c14e9c851c85621c5650d699a7b88f7ab \ - --hash=sha256:549153378611c0cca1042f20fd9c5030d37a72f634c9326e225c9f666d472884 \ - --hash=sha256:63f9c17c0e2474ccbebc9302ce2f07b55b3b3fcb211ded18a42d5764f5c10a82 \ - --hash=sha256:6bc95ed67b6741b2607298f9ea4932ff157e570ef456ef7ff0ef4884a134cc4b \ - --hash=sha256:7099a8d55cd49b737ffc99c17de504f2257e3787e02abe6d1a6d136574873441 \ - --hash=sha256:75976c217f10d48a8b5a8de3d70c454c249e4b91851f6838a4e48b8f41eb71aa \ - --hash=sha256:7bc997818309f56c0038a33b8da5c0bfbb3f1f067f315f9abd6fc07ad359398d \ - --hash=sha256:80f49023dd13ba35f7c34072fa17f604d2f19bf0989f292cedf7ab5770b87a0b \ - 
--hash=sha256:91ce48d35f4e3d3f1d83e29ef4a9267246e6a3be51864a5b7d2247d5086fa99a \ - --hash=sha256:a958c52505c8adf0d3822703078580d2c0456dd1d27fabfb6f76fe63d2971cd6 \ - --hash=sha256:b62439d7cd1222f3da897e9a9fe53bbf5c104fff4d60893ad1355d4c14a24157 \ - --hash=sha256:b7f8dd0d4c1f21759695c05a5ec8536c12f31611541f8904083f3dc582604280 \ - --hash=sha256:d204833f3c8a33bbe11eda63a54b1aad7aa7456ed769a982f21ec599ba5fa282 \ - --hash=sha256:e007f052ed10cc316df59bc90fbb7ff7950d7e2919c9757fd42a2b8ecf8a5f67 \ - --hash=sha256:f2dcb0b3b63afb6df7fd94ec6fbddac81b5492513f7b0436210d390c14d46ee8 \ - --hash=sha256:f721d1885ecae9078c3f6bbe8a88bc0786b6e749bf32ccec1ef2b18929a05046 \ - --hash=sha256:f7a6de3e98771e183645181b3627e2563dcde3ce94a9e42a3f427d2255190327 \ - --hash=sha256:f8c0a6e9e1dd3eb0414ba320f85da6b0dcbd543126e30fcc546e7372a7fbf3b9 +cryptography==38.0.3 \ + --hash=sha256:068147f32fa662c81aebab95c74679b401b12b57494872886eb5c1139250ec5d \ + --hash=sha256:06fc3cc7b6f6cca87bd56ec80a580c88f1da5306f505876a71c8cfa7050257dd \ + --hash=sha256:25c1d1f19729fb09d42e06b4bf9895212292cb27bb50229f5aa64d039ab29146 \ + --hash=sha256:402852a0aea73833d982cabb6d0c3bb582c15483d29fb7085ef2c42bfa7e38d7 \ + --hash=sha256:4e269dcd9b102c5a3d72be3c45d8ce20377b8076a43cbed6f660a1afe365e436 \ + --hash=sha256:5419a127426084933076132d317911e3c6eb77568a1ce23c3ac1e12d111e61e0 \ + --hash=sha256:554bec92ee7d1e9d10ded2f7e92a5d70c1f74ba9524947c0ba0c850c7b011828 \ + --hash=sha256:5e89468fbd2fcd733b5899333bc54d0d06c80e04cd23d8c6f3e0542358c6060b \ + --hash=sha256:65535bc550b70bd6271984d9863a37741352b4aad6fb1b3344a54e6950249b55 \ + --hash=sha256:6ab9516b85bebe7aa83f309bacc5f44a61eeb90d0b4ec125d2d003ce41932d36 \ + --hash=sha256:6addc3b6d593cd980989261dc1cce38263c76954d758c3c94de51f1e010c9a50 \ + --hash=sha256:728f2694fa743a996d7784a6194da430f197d5c58e2f4e278612b359f455e4a2 \ + --hash=sha256:785e4056b5a8b28f05a533fab69febf5004458e20dad7e2e13a3120d8ecec75a \ + 
--hash=sha256:78cf5eefac2b52c10398a42765bfa981ce2372cbc0457e6bf9658f41ec3c41d8 \ + --hash=sha256:7f836217000342d448e1c9a342e9163149e45d5b5eca76a30e84503a5a96cab0 \ + --hash=sha256:8d41a46251bf0634e21fac50ffd643216ccecfaf3701a063257fe0b2be1b6548 \ + --hash=sha256:984fe150f350a3c91e84de405fe49e688aa6092b3525f407a18b9646f6612320 \ + --hash=sha256:9b24bcff7853ed18a63cfb0c2b008936a9554af24af2fb146e16d8e1aed75748 \ + --hash=sha256:b1b35d9d3a65542ed2e9d90115dfd16bbc027b3f07ee3304fc83580f26e43249 \ + --hash=sha256:b1b52c9e5f8aa2b802d48bd693190341fae201ea51c7a167d69fc48b60e8a959 \ + --hash=sha256:bbf203f1a814007ce24bd4d51362991d5cb90ba0c177a9c08825f2cc304d871f \ + --hash=sha256:be243c7e2bfcf6cc4cb350c0d5cdf15ca6383bbcb2a8ef51d3c9411a9d4386f0 \ + --hash=sha256:bfbe6ee19615b07a98b1d2287d6a6073f734735b49ee45b11324d85efc4d5cbd \ + --hash=sha256:c46837ea467ed1efea562bbeb543994c2d1f6e800785bd5a2c98bc096f5cb220 \ + --hash=sha256:dfb4f4dd568de1b6af9f4cda334adf7d72cf5bc052516e1b2608b683375dd95c \ + --hash=sha256:ed7b00096790213e09eb11c97cc6e2b757f15f3d2f85833cd2d3ec3fe37c1722 # via # gcp-releasetool # secretstorage @@ -148,23 +155,23 @@ filelock==3.8.0 \ --hash=sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc \ --hash=sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4 # via virtualenv -gcp-docuploader==0.6.3 \ - --hash=sha256:ba8c9d76b3bbac54b0311c503a373b00edc2dc02d6d54ea9507045adb8e870f7 \ - --hash=sha256:c0f5aaa82ce1854a386197e4e359b120ad6d4e57ae2c812fce42219a3288026b +gcp-docuploader==0.6.4 \ + --hash=sha256:01486419e24633af78fd0167db74a2763974765ee8078ca6eb6964d0ebd388af \ + --hash=sha256:70861190c123d907b3b067da896265ead2eeb9263969d6955c9e0bb091b5ccbf # via -r requirements.in -gcp-releasetool==1.8.7 \ - --hash=sha256:3d2a67c9db39322194afb3b427e9cb0476ce8f2a04033695f0aeb63979fc2b37 \ - --hash=sha256:5e4d28f66e90780d77f3ecf1e9155852b0c3b13cbccb08ab07e66b2357c8da8d +gcp-releasetool==1.10.0 \ + 
--hash=sha256:72a38ca91b59c24f7e699e9227c90cbe4dd71b789383cb0164b088abae294c83 \ + --hash=sha256:8c7c99320208383d4bb2b808c6880eb7a81424afe7cdba3c8d84b25f4f0e097d # via -r requirements.in -google-api-core==2.8.2 \ - --hash=sha256:06f7244c640322b508b125903bb5701bebabce8832f85aba9335ec00b3d02edc \ - --hash=sha256:93c6a91ccac79079ac6bbf8b74ee75db970cc899278b97d53bc012f35908cf50 +google-api-core==2.10.2 \ + --hash=sha256:10c06f7739fe57781f87523375e8e1a3a4674bf6392cd6131a3222182b971320 \ + --hash=sha256:34f24bd1d5f72a8c4519773d99ca6bf080a6c4e041b4e9f024fe230191dda62e # via # google-cloud-core # google-cloud-storage -google-auth==2.11.0 \ - --hash=sha256:be62acaae38d0049c21ca90f27a23847245c9f161ff54ede13af2cb6afecbac9 \ - --hash=sha256:ed65ecf9f681832298e29328e1ef0a3676e3732b2e56f41532d45f70a22de0fb +google-auth==2.14.1 \ + --hash=sha256:ccaa901f31ad5cbb562615eb8b664b3dd0bf5404a67618e642307f00613eda4d \ + --hash=sha256:f5d8701633bebc12e0deea4df8abd8aff31c28b355360597f7f2ee60f2e4d016 # via # gcp-releasetool # google-api-core @@ -174,76 +181,102 @@ google-cloud-core==2.3.2 \ --hash=sha256:8417acf6466be2fa85123441696c4badda48db314c607cf1e5d543fa8bdc22fe \ --hash=sha256:b9529ee7047fd8d4bf4a2182de619154240df17fbe60ead399078c1ae152af9a # via google-cloud-storage -google-cloud-storage==2.5.0 \ - --hash=sha256:19a26c66c317ce542cea0830b7e787e8dac2588b6bfa4d3fd3b871ba16305ab0 \ - --hash=sha256:382f34b91de2212e3c2e7b40ec079d27ee2e3dbbae99b75b1bcd8c63063ce235 +google-cloud-storage==2.6.0 \ + --hash=sha256:104ca28ae61243b637f2f01455cc8a05e8f15a2a18ced96cb587241cdd3820f5 \ + --hash=sha256:4ad0415ff61abdd8bb2ae81c1f8f7ec7d91a1011613f2db87c614c550f97bfe9 # via gcp-docuploader -google-crc32c==1.3.0 \ - --hash=sha256:04e7c220798a72fd0f08242bc8d7a05986b2a08a0573396187fd32c1dcdd58b3 \ - --hash=sha256:05340b60bf05b574159e9bd940152a47d38af3fb43803ffe71f11d704b7696a6 \ - --hash=sha256:12674a4c3b56b706153a358eaa1018c4137a5a04635b92b4652440d3d7386206 \ - 
--hash=sha256:127f9cc3ac41b6a859bd9dc4321097b1a4f6aa7fdf71b4f9227b9e3ebffb4422 \ - --hash=sha256:13af315c3a0eec8bb8b8d80b8b128cb3fcd17d7e4edafc39647846345a3f003a \ - --hash=sha256:1926fd8de0acb9d15ee757175ce7242e235482a783cd4ec711cc999fc103c24e \ - --hash=sha256:226f2f9b8e128a6ca6a9af9b9e8384f7b53a801907425c9a292553a3a7218ce0 \ - --hash=sha256:276de6273eb074a35bc598f8efbc00c7869c5cf2e29c90748fccc8c898c244df \ - --hash=sha256:318f73f5484b5671f0c7f5f63741ab020a599504ed81d209b5c7129ee4667407 \ - --hash=sha256:3bbce1be3687bbfebe29abdb7631b83e6b25da3f4e1856a1611eb21854b689ea \ - --hash=sha256:42ae4781333e331a1743445931b08ebdad73e188fd554259e772556fc4937c48 \ - --hash=sha256:58be56ae0529c664cc04a9c76e68bb92b091e0194d6e3c50bea7e0f266f73713 \ - --hash=sha256:5da2c81575cc3ccf05d9830f9e8d3c70954819ca9a63828210498c0774fda1a3 \ - --hash=sha256:6311853aa2bba4064d0c28ca54e7b50c4d48e3de04f6770f6c60ebda1e975267 \ - --hash=sha256:650e2917660e696041ab3dcd7abac160b4121cd9a484c08406f24c5964099829 \ - --hash=sha256:6a4db36f9721fdf391646685ecffa404eb986cbe007a3289499020daf72e88a2 \ - --hash=sha256:779cbf1ce375b96111db98fca913c1f5ec11b1d870e529b1dc7354b2681a8c3a \ - --hash=sha256:7f6fe42536d9dcd3e2ffb9d3053f5d05221ae3bbcefbe472bdf2c71c793e3183 \ - --hash=sha256:891f712ce54e0d631370e1f4997b3f182f3368179198efc30d477c75d1f44942 \ - --hash=sha256:95c68a4b9b7828ba0428f8f7e3109c5d476ca44996ed9a5f8aac6269296e2d59 \ - --hash=sha256:96a8918a78d5d64e07c8ea4ed2bc44354e3f93f46a4866a40e8db934e4c0d74b \ - --hash=sha256:9c3cf890c3c0ecfe1510a452a165431b5831e24160c5fcf2071f0f85ca5a47cd \ - --hash=sha256:9f58099ad7affc0754ae42e6d87443299f15d739b0ce03c76f515153a5cda06c \ - --hash=sha256:a0b9e622c3b2b8d0ce32f77eba617ab0d6768b82836391e4f8f9e2074582bf02 \ - --hash=sha256:a7f9cbea4245ee36190f85fe1814e2d7b1e5f2186381b082f5d59f99b7f11328 \ - --hash=sha256:bab4aebd525218bab4ee615786c4581952eadc16b1ff031813a2fd51f0cc7b08 \ - --hash=sha256:c124b8c8779bf2d35d9b721e52d4adb41c9bfbde45e6a3f25f0820caa9aba73f \ - 
--hash=sha256:c9da0a39b53d2fab3e5467329ed50e951eb91386e9d0d5b12daf593973c3b168 \ - --hash=sha256:ca60076c388728d3b6ac3846842474f4250c91efbfe5afa872d3ffd69dd4b318 \ - --hash=sha256:cb6994fff247987c66a8a4e550ef374671c2b82e3c0d2115e689d21e511a652d \ - --hash=sha256:d1c1d6236feab51200272d79b3d3e0f12cf2cbb12b208c835b175a21efdb0a73 \ - --hash=sha256:dd7760a88a8d3d705ff562aa93f8445ead54f58fd482e4f9e2bafb7e177375d4 \ - --hash=sha256:dda4d8a3bb0b50f540f6ff4b6033f3a74e8bf0bd5320b70fab2c03e512a62812 \ - --hash=sha256:e0f1ff55dde0ebcfbef027edc21f71c205845585fffe30d4ec4979416613e9b3 \ - --hash=sha256:e7a539b9be7b9c00f11ef16b55486141bc2cdb0c54762f84e3c6fc091917436d \ - --hash=sha256:eb0b14523758e37802f27b7f8cd973f5f3d33be7613952c0df904b68c4842f0e \ - --hash=sha256:ed447680ff21c14aaceb6a9f99a5f639f583ccfe4ce1a5e1d48eb41c3d6b3217 \ - --hash=sha256:f52a4ad2568314ee713715b1e2d79ab55fab11e8b304fd1462ff5cccf4264b3e \ - --hash=sha256:fbd60c6aaa07c31d7754edbc2334aef50601b7f1ada67a96eb1eb57c7c72378f \ - --hash=sha256:fc28e0db232c62ca0c3600884933178f0825c99be4474cdd645e378a10588125 \ - --hash=sha256:fe31de3002e7b08eb20823b3735b97c86c5926dd0581c7710a680b418a8709d4 \ - --hash=sha256:fec221a051150eeddfdfcff162e6db92c65ecf46cb0f7bb1bf812a1520ec026b \ - --hash=sha256:ff71073ebf0e42258a42a0b34f2c09ec384977e7f6808999102eedd5b49920e3 +google-crc32c==1.5.0 \ + --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ + --hash=sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876 \ + --hash=sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c \ + --hash=sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289 \ + --hash=sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298 \ + --hash=sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02 \ + --hash=sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f \ + 
--hash=sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2 \ + --hash=sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a \ + --hash=sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb \ + --hash=sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210 \ + --hash=sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5 \ + --hash=sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee \ + --hash=sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c \ + --hash=sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a \ + --hash=sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314 \ + --hash=sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd \ + --hash=sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65 \ + --hash=sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37 \ + --hash=sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4 \ + --hash=sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13 \ + --hash=sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894 \ + --hash=sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31 \ + --hash=sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e \ + --hash=sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709 \ + --hash=sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740 \ + --hash=sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc \ + --hash=sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d \ + --hash=sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c \ + --hash=sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c \ + --hash=sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d \ + 
--hash=sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906 \ + --hash=sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61 \ + --hash=sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57 \ + --hash=sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c \ + --hash=sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a \ + --hash=sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438 \ + --hash=sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946 \ + --hash=sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7 \ + --hash=sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96 \ + --hash=sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091 \ + --hash=sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae \ + --hash=sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d \ + --hash=sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88 \ + --hash=sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2 \ + --hash=sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd \ + --hash=sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541 \ + --hash=sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728 \ + --hash=sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178 \ + --hash=sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968 \ + --hash=sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346 \ + --hash=sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8 \ + --hash=sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93 \ + --hash=sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7 \ + --hash=sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273 \ + 
--hash=sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462 \ + --hash=sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94 \ + --hash=sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd \ + --hash=sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e \ + --hash=sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57 \ + --hash=sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b \ + --hash=sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9 \ + --hash=sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a \ + --hash=sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100 \ + --hash=sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325 \ + --hash=sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183 \ + --hash=sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556 \ + --hash=sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4 # via google-resumable-media -google-resumable-media==2.3.3 \ - --hash=sha256:27c52620bd364d1c8116eaac4ea2afcbfb81ae9139fb3199652fcac1724bfb6c \ - --hash=sha256:5b52774ea7a829a8cdaa8bd2d4c3d4bc660c91b30857ab2668d0eb830f4ea8c5 +google-resumable-media==2.4.0 \ + --hash=sha256:2aa004c16d295c8f6c33b2b4788ba59d366677c0a25ae7382436cb30f776deaa \ + --hash=sha256:8d5518502f92b9ecc84ac46779bd4f09694ecb3ba38a3e7ca737a86d15cbca1f # via google-cloud-storage -googleapis-common-protos==1.56.4 \ - --hash=sha256:8eb2cbc91b69feaf23e32452a7ae60e791e09967d81d4fcc7fc388182d1bd394 \ - --hash=sha256:c25873c47279387cfdcbdafa36149887901d36202cb645a0e4f29686bf6e4417 +googleapis-common-protos==1.57.0 \ + --hash=sha256:27a849d6205838fb6cc3c1c21cb9800707a661bb21c6ce7fb13e99eb1f8a0c46 \ + --hash=sha256:a9f4a1d7f6d9809657b7f1316a1aa527f6664891531bcfcc13b6696e685f443c # via google-api-core -idna==3.3 \ - 
--hash=sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff \ - --hash=sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d +idna==3.4 \ + --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ + --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 # via requests -importlib-metadata==4.12.0 \ - --hash=sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670 \ - --hash=sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23 +importlib-metadata==5.0.0 \ + --hash=sha256:da31db32b304314d044d3c12c79bd59e307889b287ad12ff387b3500835fc2ab \ + --hash=sha256:ddb0e35065e8938f867ed4928d0ae5bf2a53b7773871bfe6bcc7e4fcdc7dea43 # via # -r requirements.in + # keyring # twine -jaraco-classes==3.2.2 \ - --hash=sha256:6745f113b0b588239ceb49532aa09c3ebb947433ce311ef2f8e3ad64ebb74594 \ - --hash=sha256:e6ef6fd3fcf4579a7a019d87d1e56a883f4e4c35cfe925f86731abc58804e647 +jaraco-classes==3.2.3 \ + --hash=sha256:2353de3288bc6b82120752201c6b1c1a14b058267fa424ed5ce5984e3b922158 \ + --hash=sha256:89559fa5c1d3c34eff6f631ad80bb21f378dbcbb35dd161fd2c6b93f5be2f98a # via keyring jeepney==0.8.0 \ --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ @@ -255,9 +288,9 @@ jinja2==3.1.2 \ --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 # via gcp-releasetool -keyring==23.9.0 \ - --hash=sha256:4c32a31174faaee48f43a7e2c7e9c3216ec5e95acf22a2bebfb4a1d05056ee44 \ - --hash=sha256:98f060ec95ada2ab910c195a2d4317be6ef87936a766b239c46aa3c7aac4f0db +keyring==23.11.0 \ + --hash=sha256:3dd30011d555f1345dec2c262f0153f2f0ca6bca041fb1dc4588349bb4c0ac1e \ + --hash=sha256:ad192263e2cdd5f12875dedc2da13534359a7e760e77f8d04b50968a821c2361 # via # gcp-releasetool # twine @@ -303,9 +336,9 @@ markupsafe==2.1.1 \ 
--hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \ --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7 # via jinja2 -more-itertools==8.14.0 \ - --hash=sha256:1bc4f91ee5b1b31ac7ceacc17c09befe6a40a503907baf9c839c229b5095cfd2 \ - --hash=sha256:c09443cd3d5438b8dafccd867a6bc1cb0894389e90cb53d227456b0b0bccb750 +more-itertools==9.0.0 \ + --hash=sha256:250e83d7e81d0c87ca6bd942e6aeab8cc9daa6096d12c5308f3f92fa5e5c1f41 \ + --hash=sha256:5a6257e40878ef0520b1803990e3e22303a41b5714006c32a3fd8304b26ea1ab # via jaraco-classes nox==2022.8.7 \ --hash=sha256:1b894940551dc5c389f9271d197ca5d655d40bdc6ccf93ed6880e4042760a34b \ @@ -321,34 +354,33 @@ pkginfo==1.8.3 \ --hash=sha256:848865108ec99d4901b2f7e84058b6e7660aae8ae10164e015a6dcf5b242a594 \ --hash=sha256:a84da4318dd86f870a9447a8c98340aa06216bfc6f2b7bdc4b8766984ae1867c # via twine -platformdirs==2.5.2 \ - --hash=sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788 \ - --hash=sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19 +platformdirs==2.5.4 \ + --hash=sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7 \ + --hash=sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10 # via virtualenv -protobuf==3.20.2 \ - --hash=sha256:03d76b7bd42ac4a6e109742a4edf81ffe26ffd87c5993126d894fe48a120396a \ - --hash=sha256:09e25909c4297d71d97612f04f41cea8fa8510096864f2835ad2f3b3df5a5559 \ - --hash=sha256:18e34a10ae10d458b027d7638a599c964b030c1739ebd035a1dfc0e22baa3bfe \ - --hash=sha256:291fb4307094bf5ccc29f424b42268640e00d5240bf0d9b86bf3079f7576474d \ - --hash=sha256:2c0b040d0b5d5d207936ca2d02f00f765906622c07d3fa19c23a16a8ca71873f \ - --hash=sha256:384164994727f274cc34b8abd41a9e7e0562801361ee77437099ff6dfedd024b \ - --hash=sha256:3cb608e5a0eb61b8e00fe641d9f0282cd0eedb603be372f91f163cbfbca0ded0 \ - --hash=sha256:5d9402bf27d11e37801d1743eada54372f986a372ec9679673bfcc5c60441151 \ - 
--hash=sha256:712dca319eee507a1e7df3591e639a2b112a2f4a62d40fe7832a16fd19151750 \ - --hash=sha256:7a5037af4e76c975b88c3becdf53922b5ffa3f2cddf657574a4920a3b33b80f3 \ - --hash=sha256:8228e56a865c27163d5d1d1771d94b98194aa6917bcfb6ce139cbfa8e3c27334 \ - --hash=sha256:84a1544252a933ef07bb0b5ef13afe7c36232a774affa673fc3636f7cee1db6c \ - --hash=sha256:84fe5953b18a383fd4495d375fe16e1e55e0a3afe7b4f7b4d01a3a0649fcda9d \ - --hash=sha256:9c673c8bfdf52f903081816b9e0e612186684f4eb4c17eeb729133022d6032e3 \ - --hash=sha256:9f876a69ca55aed879b43c295a328970306e8e80a263ec91cf6e9189243c613b \ - --hash=sha256:a9e5ae5a8e8985c67e8944c23035a0dff2c26b0f5070b2f55b217a1c33bbe8b1 \ - --hash=sha256:b4fdb29c5a7406e3f7ef176b2a7079baa68b5b854f364c21abe327bbeec01cdb \ - --hash=sha256:c184485e0dfba4dfd451c3bd348c2e685d6523543a0f91b9fd4ae90eb09e8422 \ - --hash=sha256:c9cdf251c582c16fd6a9f5e95836c90828d51b0069ad22f463761d27c6c19019 \ - --hash=sha256:e39cf61bb8582bda88cdfebc0db163b774e7e03364bbf9ce1ead13863e81e359 \ - --hash=sha256:e8fbc522303e09036c752a0afcc5c0603e917222d8bedc02813fd73b4b4ed804 \ - --hash=sha256:f34464ab1207114e73bba0794d1257c150a2b89b7a9faf504e00af7c9fd58978 \ - --hash=sha256:f52dabc96ca99ebd2169dadbe018824ebda08a795c7684a0b7d203a290f3adb0 +protobuf==3.20.3 \ + --hash=sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7 \ + --hash=sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c \ + --hash=sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2 \ + --hash=sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b \ + --hash=sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050 \ + --hash=sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9 \ + --hash=sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7 \ + --hash=sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454 \ + 
--hash=sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480 \ + --hash=sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469 \ + --hash=sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c \ + --hash=sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e \ + --hash=sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db \ + --hash=sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905 \ + --hash=sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b \ + --hash=sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86 \ + --hash=sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4 \ + --hash=sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402 \ + --hash=sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7 \ + --hash=sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4 \ + --hash=sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99 \ + --hash=sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee # via # gcp-docuploader # gcp-releasetool @@ -377,9 +409,9 @@ pygments==2.13.0 \ # via # readme-renderer # rich -pyjwt==2.4.0 \ - --hash=sha256:72d1d253f32dbd4f5c88eaf1fdc62f3a19f676ccbadb9dbc5d07e951b2b26daf \ - --hash=sha256:d42908208c699b3b973cbeb01a969ba6a96c821eefb1c5bfe4c390c01d67abba +pyjwt==2.6.0 \ + --hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \ + --hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14 # via gcp-releasetool pyparsing==3.0.9 \ --hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \ @@ -392,9 +424,9 @@ python-dateutil==2.8.2 \ --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 # via gcp-releasetool 
-readme-renderer==37.0 \ - --hash=sha256:07b7ea234e03e58f77cc222e206e6abb8f4c0435becce5104794ee591f9301c5 \ - --hash=sha256:9fa416704703e509eeb900696751c908ddeb2011319d93700d8f18baff887a69 +readme-renderer==37.3 \ + --hash=sha256:cd653186dfc73055656f090f227f5cb22a046d7f71a841dfa305f55c9a513273 \ + --hash=sha256:f67a16caedfa71eef48a31b39708637a6f4664c4394801a7b0d6432d13907343 # via twine requests==2.28.1 \ --hash=sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983 \ @@ -405,17 +437,17 @@ requests==2.28.1 \ # google-cloud-storage # requests-toolbelt # twine -requests-toolbelt==0.9.1 \ - --hash=sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f \ - --hash=sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0 +requests-toolbelt==0.10.1 \ + --hash=sha256:18565aa58116d9951ac39baa288d3adb5b3ff975c4f25eee78555d89e8f247f7 \ + --hash=sha256:62e09f7ff5ccbda92772a29f394a49c3ad6cb181d568b1337626b2abb628a63d # via twine rfc3986==2.0.0 \ --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c # via twine -rich==12.5.1 \ - --hash=sha256:2eb4e6894cde1e017976d2975ac210ef515d7548bc595ba20e195fb9628acdeb \ - --hash=sha256:63a5c5ce3673d3d5fbbf23cd87e11ab84b6b451436f1b7f19ec54b6bc36ed7ca +rich==12.6.0 \ + --hash=sha256:a4eb26484f2c82589bd9a17c73d32a010b1e29d89f1604cd9bf3a2097b81bb5e \ + --hash=sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0 # via twine rsa==4.9 \ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ @@ -437,9 +469,9 @@ twine==4.0.1 \ --hash=sha256:42026c18e394eac3e06693ee52010baa5313e4811d5a11050e7d48436cf41b9e \ --hash=sha256:96b1cf12f7ae611a4a40b6ae8e9570215daff0611828f5fe1f37a16255ab24a0 # via -r requirements.in -typing-extensions==4.3.0 \ - --hash=sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02 \ - 
--hash=sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6 +typing-extensions==4.4.0 \ + --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ + --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e # via -r requirements.in urllib3==1.26.12 \ --hash=sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e \ @@ -447,25 +479,25 @@ urllib3==1.26.12 \ # via # requests # twine -virtualenv==20.16.4 \ - --hash=sha256:014f766e4134d0008dcaa1f95bafa0fb0f575795d07cae50b1bee514185d6782 \ - --hash=sha256:035ed57acce4ac35c82c9d8802202b0e71adac011a511ff650cbcf9635006a22 +virtualenv==20.16.7 \ + --hash=sha256:8691e3ff9387f743e00f6bb20f70121f5e4f596cae754531f2b3b3a1b1ac696e \ + --hash=sha256:efd66b00386fdb7dbe4822d172303f40cd05e50e01740b19ea42425cbe653e29 # via nox webencodings==0.5.1 \ --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \ --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923 # via bleach -wheel==0.37.1 \ - --hash=sha256:4bdcd7d840138086126cd09254dc6195fb4fc6f01c050a1d7236f2630db1d22a \ - --hash=sha256:e9a504e793efbca1b8e0e9cb979a249cf4a0a7b5b8c9e8b65a5e39d49529c1c4 +wheel==0.38.4 \ + --hash=sha256:965f5259b566725405b05e7cf774052044b1ed30119b5d586b2703aafe8719ac \ + --hash=sha256:b60533f3f5d530e971d6737ca6d58681ee434818fab630c83a734bb10c083ce8 # via -r requirements.in -zipp==3.8.1 \ - --hash=sha256:05b45f1ee8f807d0cc928485ca40a07cb491cf092ff587c0df9cb1fd154848d2 \ - --hash=sha256:47c40d7fe183a6f21403a199b3e4192cca5774656965b0a4988ad2f8feb5f009 +zipp==3.10.0 \ + --hash=sha256:4fcb6f278987a6605757302a6e40e896257570d11c51628968ccb2a47e80c6c1 \ + --hash=sha256:7a7262fd930bd3e36c50b9a64897aec3fafff3dfdeec9623ae22b40e93f99bb8 # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==65.2.0 \ - --hash=sha256:7f4bc85450898a09f76ebf28b72fa25bc7111f6c7d665d514a60bba9c75ef2a9 \ - 
--hash=sha256:a3ca5857c89f82f5c9410e8508cb32f4872a3bafd4aa7ae122a24ca33bccc750 +setuptools==65.5.1 \ + --hash=sha256:d0b9a8433464d5800cbe05094acf5c6d52a91bfac9b52bcfc4d41382be5d5d31 \ + --hash=sha256:e197a19aa8ec9722928f2206f8de752def0e4c9fc6953527360d1c36d94ddb2f # via -r requirements.in diff --git a/packages/db-dtypes/noxfile.py b/packages/db-dtypes/noxfile.py index 3717f13b29f2..3a761be9807e 100644 --- a/packages/db-dtypes/noxfile.py +++ b/packages/db-dtypes/noxfile.py @@ -369,12 +369,16 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python=DEFAULT_PYTHON_VERSION) +@nox.session(python="3.9") def docs(session): """Build the docs for this library.""" session.install("-e", ".") - session.install("sphinx==4.0.1", "alabaster", "recommonmark") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( @@ -391,13 +395,16 @@ def docs(session): ) -@nox.session(python=DEFAULT_PYTHON_VERSION) +@nox.session(python="3.9") def docfx(session): """Build the docfx yaml files for this library.""" session.install("-e", ".") session.install( - "sphinx==4.0.1", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml" + "sphinx==4.0.1", + "alabaster", + "recommonmark", + "gcp-sphinx-docfx-yaml", ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) From 9a3d36c2a5e3ce1b1e62a5f3376cea8f3e16d187 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 26 Nov 2022 19:01:09 -0500 Subject: [PATCH 092/210] chore(python): drop flake8-import-order in samples noxfile (#156) Source-Link: https://github.com/googleapis/synthtool/commit/6ed3a831cb9ff69ef8a504c353e098ec0192ad93 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:3abfa0f1886adaf0b83f07cb117b24a639ea1cb9cffe56d43280b977033563eb Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 2 +- 
.../db-dtypes/samples/snippets/noxfile.py | 26 +++---------------- 2 files changed, 4 insertions(+), 24 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 3f1ccc085ef7..bb21147e4c23 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:e6cbd61f1838d9ff6a31436dfc13717f372a7482a82fc1863ca954ec47bff8c8 + digest: sha256:3abfa0f1886adaf0b83f07cb117b24a639ea1cb9cffe56d43280b977033563eb diff --git a/packages/db-dtypes/samples/snippets/noxfile.py b/packages/db-dtypes/samples/snippets/noxfile.py index b053ca568f63..e8283c38d4a0 100644 --- a/packages/db-dtypes/samples/snippets/noxfile.py +++ b/packages/db-dtypes/samples/snippets/noxfile.py @@ -18,7 +18,7 @@ import os from pathlib import Path import sys -from typing import Callable, Dict, List, Optional +from typing import Callable, Dict, Optional import nox @@ -109,22 +109,6 @@ def get_pytest_env_vars() -> Dict[str, str]: # -def _determine_local_import_names(start_dir: str) -> List[str]: - """Determines all import names that should be considered "local". - - This is used when running the linter to insure that import order is - properly checked. - """ - file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)] - return [ - basename - for basename, extension in file_ext_pairs - if extension == ".py" - or os.path.isdir(os.path.join(start_dir, basename)) - and basename not in ("__pycache__") - ] - - # Linting with flake8. 
# # We ignore the following rules: @@ -139,7 +123,6 @@ def _determine_local_import_names(start_dir: str) -> List[str]: "--show-source", "--builtin=gettext", "--max-complexity=20", - "--import-order-style=google", "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", "--max-line-length=88", @@ -149,14 +132,11 @@ def _determine_local_import_names(start_dir: str) -> List[str]: @nox.session def lint(session: nox.sessions.Session) -> None: if not TEST_CONFIG["enforce_type_hints"]: - session.install("flake8", "flake8-import-order") + session.install("flake8") else: - session.install("flake8", "flake8-import-order", "flake8-annotations") + session.install("flake8", "flake8-annotations") - local_names = _determine_local_import_names(".") args = FLAKE8_COMMON_ARGS + [ - "--application-import-names", - ",".join(local_names), ".", ] session.run("flake8", *args) From 3ca24531981fba7747ce8e6e03fc0fc0b5176315 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 5 Dec 2022 13:26:00 -0500 Subject: [PATCH 093/210] ci: update the job name in unittest-prerelease.yml (#164) --- packages/db-dtypes/.github/workflows/unittest-prerelease.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/db-dtypes/.github/workflows/unittest-prerelease.yml b/packages/db-dtypes/.github/workflows/unittest-prerelease.yml index 1cf125c0a223..776945a98345 100644 --- a/packages/db-dtypes/.github/workflows/unittest-prerelease.yml +++ b/packages/db-dtypes/.github/workflows/unittest-prerelease.yml @@ -4,7 +4,7 @@ on: - main name: unittest-prerelease jobs: - unit: + unit-prerelease: runs-on: ubuntu-latest strategy: matrix: From 43971d3fa7d317dc8a1b7ff6961193f198d68f35 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 5 Dec 2022 11:21:09 -0800 Subject: [PATCH 094/210] chore(main): release 1.0.5 (#163) Co-authored-by: 
release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/db-dtypes/CHANGELOG.md | 7 +++++++ packages/db-dtypes/db_dtypes/version.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/db-dtypes/CHANGELOG.md b/packages/db-dtypes/CHANGELOG.md index 8a9b05e758b3..bc43cdedef97 100644 --- a/packages/db-dtypes/CHANGELOG.md +++ b/packages/db-dtypes/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.0.5](https://github.com/googleapis/python-db-dtypes-pandas/compare/v1.0.4...v1.0.5) (2022-12-05) + + +### Dependencies + +* remove upper bound on pyarrow version ([388e082](https://github.com/googleapis/python-db-dtypes-pandas/commit/388e082a47d9515a14e20ffd87705c71712087ab)) + ## [1.0.4](https://github.com/googleapis/python-db-dtypes-pandas/compare/v1.0.3...v1.0.4) (2022-09-19) diff --git a/packages/db-dtypes/db_dtypes/version.py b/packages/db-dtypes/db_dtypes/version.py index fa2fdbccf89e..8794e1f45403 100644 --- a/packages/db-dtypes/db_dtypes/version.py +++ b/packages/db-dtypes/db_dtypes/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "1.0.4" +__version__ = "1.0.5" From 1c8e64297b6fc00d25700f32ecc2f4f1005285c4 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 13 Dec 2022 12:48:34 -0500 Subject: [PATCH 095/210] build(deps): bump certifi from 2022.9.24 to 2022.12.7 in /synthtool/gcp/templates/python_library/.kokoro (#165) Source-Link: https://github.com/googleapis/synthtool/commit/b4fe62efb5114b6738ad4b13d6f654f2bf4b7cc0 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:3bf87e47c2173d7eed42714589dc4da2c07c3268610f1e47f8e1a30decbfc7f1 Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 2 +- packages/db-dtypes/.kokoro/requirements.txt | 6 +++--- packages/db-dtypes/.pre-commit-config.yaml | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index bb21147e4c23..fccaa8e84449 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:3abfa0f1886adaf0b83f07cb117b24a639ea1cb9cffe56d43280b977033563eb + digest: sha256:3bf87e47c2173d7eed42714589dc4da2c07c3268610f1e47f8e1a30decbfc7f1 diff --git a/packages/db-dtypes/.kokoro/requirements.txt b/packages/db-dtypes/.kokoro/requirements.txt index 9c1b9be34e6b..05dc4672edaa 100644 --- a/packages/db-dtypes/.kokoro/requirements.txt +++ b/packages/db-dtypes/.kokoro/requirements.txt @@ -20,9 +20,9 @@ cachetools==5.2.0 \ --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \ --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db # via google-auth -certifi==2022.9.24 \ - --hash=sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14 \ - --hash=sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382 +certifi==2022.12.7 \ + --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \ + --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18 # via requests cffi==1.15.1 \ --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ diff --git a/packages/db-dtypes/.pre-commit-config.yaml b/packages/db-dtypes/.pre-commit-config.yaml index 46d237160f6d..5405cc8ff1f3 100644 --- a/packages/db-dtypes/.pre-commit-config.yaml +++ b/packages/db-dtypes/.pre-commit-config.yaml @@ -25,7 +25,7 @@ repos: rev: 22.3.0 hooks: - id: black -- repo: https://gitlab.com/pycqa/flake8 +- repo: https://github.com/pycqa/flake8 rev: 3.9.2 hooks: - id: flake8 From 64d87b7d9dfc23b04d138146ab4e28a994bf3a42 Mon Sep 17 00:00:00 2001 From: Chalmer Lowe Date: Tue, 3 Jan 2023 10:55:08 -0500 Subject: [PATCH 096/210] =?UTF-8?q?fix:=20adds=20bounds=20checking=20becau?= =?UTF-8?q?se=20pandas=20now=20handles=20microsecond=20reso=E2=80=A6=20(#1?= =?UTF-8?q?66)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: adds bounds 
checking because pandas now handles microsecond resolution * Update db_dtypes/__init__.py Co-authored-by: Tim Swast Co-authored-by: Tim Swast --- packages/db-dtypes/db_dtypes/__init__.py | 15 ++++++++++++++- packages/db-dtypes/tests/unit/test_date.py | 17 ++++++++++++++--- 2 files changed, 28 insertions(+), 4 deletions(-) diff --git a/packages/db-dtypes/db_dtypes/__init__.py b/packages/db-dtypes/db_dtypes/__init__.py index 2b51bcdbb2a0..3ecefedf814a 100644 --- a/packages/db-dtypes/db_dtypes/__init__.py +++ b/packages/db-dtypes/db_dtypes/__init__.py @@ -23,9 +23,11 @@ import packaging.version import pandas import pandas.api.extensions +from pandas.errors import OutOfBoundsDatetime import pyarrow import pyarrow.compute + from db_dtypes.version import __version__ from db_dtypes import core @@ -143,6 +145,7 @@ def _datetime( second = parsed.group("seconds") fraction = parsed.group("fraction") nanosecond = int(fraction.ljust(9, "0")[:9]) if fraction else 0 + return pandas.Timestamp( year=1970, month=1, @@ -263,7 +266,17 @@ def _datetime( year = int(match.group("year")) month = int(match.group("month")) day = int(match.group("day")) - return pandas.Timestamp(year=year, month=month, day=day).to_datetime64() + + dateObj = pandas.Timestamp( + year=year, + month=month, + day=day, + ) + if pandas.Timestamp.min < dateObj < pandas.Timestamp.max: + return dateObj.to_datetime64() + else: # pragma: NO COVER + # TODO(#166): Include these lines in coverage when pandas 2.0 is released. 
+ raise OutOfBoundsDatetime("Out of bounds", scalar) # pragma: NO COVER else: raise TypeError("Invalid value type", scalar) diff --git a/packages/db-dtypes/tests/unit/test_date.py b/packages/db-dtypes/tests/unit/test_date.py index bbe74cbdd448..5bd081213f89 100644 --- a/packages/db-dtypes/tests/unit/test_date.py +++ b/packages/db-dtypes/tests/unit/test_date.py @@ -18,6 +18,7 @@ import numpy import numpy.testing import pandas +from pandas.errors import OutOfBoundsDatetime import pandas.testing import pytest @@ -143,15 +144,25 @@ def test_date_set_slice_null(): ("2021-2-99", "day is out of range for month"), ("2021-99-1", "month must be in 1[.][.]12"), ("10000-1-1", "year 10000 is out of range"), - # Outside of min/max values pandas.Timestamp. + ], +) +def test_date_parsing_errors(value, error): + with pytest.raises(ValueError, match=error): + pandas.Series([value], dtype="dbdate") + + +@pytest.mark.parametrize( + "value, error", + [ + # Values that are outside of the min/max values allowed by pandas.Timestamp ("0001-01-01", "Out of bounds"), ("9999-12-31", "Out of bounds"), ("1677-09-21", "Out of bounds"), ("2262-04-12", "Out of bounds"), ], ) -def test_date_parsing_errors(value, error): - with pytest.raises(ValueError, match=error): +def test_date_parsing_errors_out_of_bounds(value, error): + with pytest.raises(OutOfBoundsDatetime, match=error): pandas.Series([value], dtype="dbdate") From b14a51bef12553f29f63ab2d4184fc9e5472d50f Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 6 Jan 2023 13:59:21 -0500 Subject: [PATCH 097/210] chore(python): add support for python 3.11 (#168) Source-Link: https://github.com/googleapis/synthtool/commit/7197a001ffb6d8ce7b0b9b11c280f0c536c1033a Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:c43f1d918bcf817d337aa29ff833439494a158a0831508fda4ec75dc4c0d0320 Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 4 +- 
.../db-dtypes/.github/workflows/unittest.yml | 2 +- .../.kokoro/samples/python3.11/common.cfg | 40 +++++++++++++++++++ .../.kokoro/samples/python3.11/continuous.cfg | 6 +++ .../samples/python3.11/periodic-head.cfg | 11 +++++ .../.kokoro/samples/python3.11/periodic.cfg | 6 +++ .../.kokoro/samples/python3.11/presubmit.cfg | 6 +++ packages/db-dtypes/CONTRIBUTING.rst | 6 ++- packages/db-dtypes/noxfile.py | 2 +- .../db-dtypes/samples/snippets/noxfile.py | 2 +- 10 files changed, 78 insertions(+), 7 deletions(-) create mode 100644 packages/db-dtypes/.kokoro/samples/python3.11/common.cfg create mode 100644 packages/db-dtypes/.kokoro/samples/python3.11/continuous.cfg create mode 100644 packages/db-dtypes/.kokoro/samples/python3.11/periodic-head.cfg create mode 100644 packages/db-dtypes/.kokoro/samples/python3.11/periodic.cfg create mode 100644 packages/db-dtypes/.kokoro/samples/python3.11/presubmit.cfg diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index fccaa8e84449..889f77dfa25d 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -1,4 +1,4 @@ -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,4 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:3bf87e47c2173d7eed42714589dc4da2c07c3268610f1e47f8e1a30decbfc7f1 + digest: sha256:c43f1d918bcf817d337aa29ff833439494a158a0831508fda4ec75dc4c0d0320 diff --git a/packages/db-dtypes/.github/workflows/unittest.yml b/packages/db-dtypes/.github/workflows/unittest.yml index 23000c05d9d8..8057a7691b12 100644 --- a/packages/db-dtypes/.github/workflows/unittest.yml +++ b/packages/db-dtypes/.github/workflows/unittest.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ['3.7', '3.8', '3.9', '3.10'] + python: ['3.7', '3.8', '3.9', '3.10', '3.11'] steps: - name: Checkout uses: actions/checkout@v3 diff --git a/packages/db-dtypes/.kokoro/samples/python3.11/common.cfg b/packages/db-dtypes/.kokoro/samples/python3.11/common.cfg new file mode 100644 index 000000000000..6f8962a49b8a --- /dev/null +++ b/packages/db-dtypes/.kokoro/samples/python3.11/common.cfg @@ -0,0 +1,40 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.11" +} + +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-311" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-db-dtypes-pandas/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "python-db-dtypes-pandas/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/python3.11/continuous.cfg b/packages/db-dtypes/.kokoro/samples/python3.11/continuous.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/db-dtypes/.kokoro/samples/python3.11/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/python3.11/periodic-head.cfg b/packages/db-dtypes/.kokoro/samples/python3.11/periodic-head.cfg new file mode 100644 index 000000000000..ee3d56408db9 --- /dev/null +++ b/packages/db-dtypes/.kokoro/samples/python3.11/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-db-dtypes-pandas/.kokoro/test-samples-against-head.sh" +} diff --git a/packages/db-dtypes/.kokoro/samples/python3.11/periodic.cfg b/packages/db-dtypes/.kokoro/samples/python3.11/periodic.cfg new file mode 100644 index 000000000000..71cd1e597e38 --- /dev/null +++ b/packages/db-dtypes/.kokoro/samples/python3.11/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} diff --git a/packages/db-dtypes/.kokoro/samples/python3.11/presubmit.cfg b/packages/db-dtypes/.kokoro/samples/python3.11/presubmit.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/db-dtypes/.kokoro/samples/python3.11/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/db-dtypes/CONTRIBUTING.rst 
b/packages/db-dtypes/CONTRIBUTING.rst index 5f36f12da0ac..f6d526c7ce8c 100644 --- a/packages/db-dtypes/CONTRIBUTING.rst +++ b/packages/db-dtypes/CONTRIBUTING.rst @@ -22,7 +22,7 @@ In order to add a feature: documentation. - The feature must work fully on the following CPython versions: - 3.7, 3.8, 3.9 and 3.10 on both UNIX and Windows. + 3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -72,7 +72,7 @@ We use `nox `__ to instrument our tests. - To run a single unit test:: - $ nox -s unit-3.10 -- -k + $ nox -s unit-3.11 -- -k .. note:: @@ -225,11 +225,13 @@ We support: - `Python 3.8`_ - `Python 3.9`_ - `Python 3.10`_ +- `Python 3.11`_ .. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ .. _Python 3.9: https://docs.python.org/3.9/ .. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ Supported versions can be found in our ``noxfile.py`` `config`_. diff --git a/packages/db-dtypes/noxfile.py b/packages/db-dtypes/noxfile.py index 3a761be9807e..4b6d7fa7e89f 100644 --- a/packages/db-dtypes/noxfile.py +++ b/packages/db-dtypes/noxfile.py @@ -32,7 +32,7 @@ DEFAULT_PYTHON_VERSION = "3.8" -UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] +UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] UNIT_TEST_STANDARD_DEPENDENCIES = [ "mock", "asyncmock", diff --git a/packages/db-dtypes/samples/snippets/noxfile.py b/packages/db-dtypes/samples/snippets/noxfile.py index e8283c38d4a0..1224cbe212e4 100644 --- a/packages/db-dtypes/samples/snippets/noxfile.py +++ b/packages/db-dtypes/samples/snippets/noxfile.py @@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. 
-ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] From 3a293f88002c4733a1c6bee0323587cff3bb1f5a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 30 Jan 2023 16:52:31 +0000 Subject: [PATCH 098/210] chore: fix prerelease_deps nox session [autoapprove] (#169) Source-Link: https://togithub.com/googleapis/synthtool/commit/26c7505b2f76981ec1707b851e1595c8c06e90fc Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:f946c75373c2b0040e8e318c5e85d0cf46bc6e61d0a01f3ef94d8de974ac6790 --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 2 +- packages/db-dtypes/noxfile.py | 14 ++++++-------- 2 files changed, 7 insertions(+), 9 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 889f77dfa25d..f0f3b24b20cd 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:c43f1d918bcf817d337aa29ff833439494a158a0831508fda4ec75dc4c0d0320 + digest: sha256:f946c75373c2b0040e8e318c5e85d0cf46bc6e61d0a01f3ef94d8de974ac6790 diff --git a/packages/db-dtypes/noxfile.py b/packages/db-dtypes/noxfile.py index 4b6d7fa7e89f..6158b2523647 100644 --- a/packages/db-dtypes/noxfile.py +++ b/packages/db-dtypes/noxfile.py @@ -285,9 +285,9 @@ def unit_prerelease(session): def install_systemtest_dependencies(session, *constraints): # Use pre-release gRPC for system tests. - # Exclude version 1.49.0rc1 which has a known issue. - # See https://github.com/grpc/grpc/pull/30642 - session.install("--pre", "grpcio!=1.49.0rc1") + # Exclude version 1.52.0rc1 which has a known issue. 
+ # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) @@ -442,9 +442,7 @@ def prerelease_deps(session): unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES session.install(*unit_deps_all) system_deps_all = ( - SYSTEM_TEST_STANDARD_DEPENDENCIES - + SYSTEM_TEST_EXTERNAL_DEPENDENCIES - + SYSTEM_TEST_EXTRAS + SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES ) session.install(*system_deps_all) @@ -474,8 +472,8 @@ def prerelease_deps(session): # dependency of grpc "six", "googleapis-common-protos", - # Exclude version 1.49.0rc1 which has a known issue. See https://github.com/grpc/grpc/pull/30642 - "grpcio!=1.49.0rc1", + # Exclude version 1.52.0rc1 which has a known issue. See https://github.com/grpc/grpc/issues/32163 + "grpcio!=1.52.0rc1", "grpcio-status", "google-api-core", "proto-plus", From fdee2021bf9ab312f5726077a955ee4a3f8208be Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 30 Jan 2023 17:45:25 +0000 Subject: [PATCH 099/210] chore(deps): update dependency pytest to v7.2.1 (#170) * chore(deps): update dependency pytest to v7.2.1 * use python 3.11 in prerelease github action workflow * use python 3.11 in compliance github action workflow --------- Co-authored-by: Anthonios Partheniou --- packages/db-dtypes/.github/workflows/compliance.yml | 4 ++-- packages/db-dtypes/.github/workflows/unittest-prerelease.yml | 2 +- packages/db-dtypes/samples/snippets/requirements-test.txt | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/db-dtypes/.github/workflows/compliance.yml b/packages/db-dtypes/.github/workflows/compliance.yml index aa49744df5b5..0ae11019d68a 100644 --- a/packages/db-dtypes/.github/workflows/compliance.yml +++ b/packages/db-dtypes/.github/workflows/compliance.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ['3.10'] + python: ['3.11'] 
steps: - name: Checkout uses: actions/checkout@v3 @@ -29,7 +29,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ['3.10'] + python: ['3.11'] steps: - name: Checkout uses: actions/checkout@v3 diff --git a/packages/db-dtypes/.github/workflows/unittest-prerelease.yml b/packages/db-dtypes/.github/workflows/unittest-prerelease.yml index 776945a98345..18fc3f09e658 100644 --- a/packages/db-dtypes/.github/workflows/unittest-prerelease.yml +++ b/packages/db-dtypes/.github/workflows/unittest-prerelease.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ['3.10'] + python: ['3.11'] steps: - name: Checkout uses: actions/checkout@v3 diff --git a/packages/db-dtypes/samples/snippets/requirements-test.txt b/packages/db-dtypes/samples/snippets/requirements-test.txt index 49780e035690..805eb2a9f845 100644 --- a/packages/db-dtypes/samples/snippets/requirements-test.txt +++ b/packages/db-dtypes/samples/snippets/requirements-test.txt @@ -1 +1 @@ -pytest==7.2.0 +pytest==7.2.1 From 244d49612cdbf09084739f08d648243273ffae09 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 8 Feb 2023 15:16:24 +0000 Subject: [PATCH 100/210] build(deps): bump cryptography from 38.0.3 to 39.0.1 in /synthtool/gcp/templates/python_library/.kokoro (#171) Source-Link: https://togithub.com/googleapis/synthtool/commit/bb171351c3946d3c3c32e60f5f18cee8c464ec51 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:f62c53736eccb0c4934a3ea9316e0d57696bb49c1a7c86c726e9bb8a2f87dadf --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 2 +- packages/db-dtypes/.kokoro/requirements.txt | 49 +++++++++----------- 2 files changed, 23 insertions(+), 28 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index f0f3b24b20cd..894fb6bc9b47 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ 
b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:f946c75373c2b0040e8e318c5e85d0cf46bc6e61d0a01f3ef94d8de974ac6790 + digest: sha256:f62c53736eccb0c4934a3ea9316e0d57696bb49c1a7c86c726e9bb8a2f87dadf diff --git a/packages/db-dtypes/.kokoro/requirements.txt b/packages/db-dtypes/.kokoro/requirements.txt index 05dc4672edaa..096e4800a9ac 100644 --- a/packages/db-dtypes/.kokoro/requirements.txt +++ b/packages/db-dtypes/.kokoro/requirements.txt @@ -113,33 +113,28 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==38.0.3 \ - --hash=sha256:068147f32fa662c81aebab95c74679b401b12b57494872886eb5c1139250ec5d \ - --hash=sha256:06fc3cc7b6f6cca87bd56ec80a580c88f1da5306f505876a71c8cfa7050257dd \ - --hash=sha256:25c1d1f19729fb09d42e06b4bf9895212292cb27bb50229f5aa64d039ab29146 \ - --hash=sha256:402852a0aea73833d982cabb6d0c3bb582c15483d29fb7085ef2c42bfa7e38d7 \ - --hash=sha256:4e269dcd9b102c5a3d72be3c45d8ce20377b8076a43cbed6f660a1afe365e436 \ - --hash=sha256:5419a127426084933076132d317911e3c6eb77568a1ce23c3ac1e12d111e61e0 \ - --hash=sha256:554bec92ee7d1e9d10ded2f7e92a5d70c1f74ba9524947c0ba0c850c7b011828 \ - --hash=sha256:5e89468fbd2fcd733b5899333bc54d0d06c80e04cd23d8c6f3e0542358c6060b \ - --hash=sha256:65535bc550b70bd6271984d9863a37741352b4aad6fb1b3344a54e6950249b55 \ - --hash=sha256:6ab9516b85bebe7aa83f309bacc5f44a61eeb90d0b4ec125d2d003ce41932d36 \ - --hash=sha256:6addc3b6d593cd980989261dc1cce38263c76954d758c3c94de51f1e010c9a50 \ - --hash=sha256:728f2694fa743a996d7784a6194da430f197d5c58e2f4e278612b359f455e4a2 \ - --hash=sha256:785e4056b5a8b28f05a533fab69febf5004458e20dad7e2e13a3120d8ecec75a \ - --hash=sha256:78cf5eefac2b52c10398a42765bfa981ce2372cbc0457e6bf9658f41ec3c41d8 \ - 
--hash=sha256:7f836217000342d448e1c9a342e9163149e45d5b5eca76a30e84503a5a96cab0 \ - --hash=sha256:8d41a46251bf0634e21fac50ffd643216ccecfaf3701a063257fe0b2be1b6548 \ - --hash=sha256:984fe150f350a3c91e84de405fe49e688aa6092b3525f407a18b9646f6612320 \ - --hash=sha256:9b24bcff7853ed18a63cfb0c2b008936a9554af24af2fb146e16d8e1aed75748 \ - --hash=sha256:b1b35d9d3a65542ed2e9d90115dfd16bbc027b3f07ee3304fc83580f26e43249 \ - --hash=sha256:b1b52c9e5f8aa2b802d48bd693190341fae201ea51c7a167d69fc48b60e8a959 \ - --hash=sha256:bbf203f1a814007ce24bd4d51362991d5cb90ba0c177a9c08825f2cc304d871f \ - --hash=sha256:be243c7e2bfcf6cc4cb350c0d5cdf15ca6383bbcb2a8ef51d3c9411a9d4386f0 \ - --hash=sha256:bfbe6ee19615b07a98b1d2287d6a6073f734735b49ee45b11324d85efc4d5cbd \ - --hash=sha256:c46837ea467ed1efea562bbeb543994c2d1f6e800785bd5a2c98bc096f5cb220 \ - --hash=sha256:dfb4f4dd568de1b6af9f4cda334adf7d72cf5bc052516e1b2608b683375dd95c \ - --hash=sha256:ed7b00096790213e09eb11c97cc6e2b757f15f3d2f85833cd2d3ec3fe37c1722 +cryptography==39.0.1 \ + --hash=sha256:0f8da300b5c8af9f98111ffd512910bc792b4c77392a9523624680f7956a99d4 \ + --hash=sha256:35f7c7d015d474f4011e859e93e789c87d21f6f4880ebdc29896a60403328f1f \ + --hash=sha256:5aa67414fcdfa22cf052e640cb5ddc461924a045cacf325cd164e65312d99502 \ + --hash=sha256:5d2d8b87a490bfcd407ed9d49093793d0f75198a35e6eb1a923ce1ee86c62b41 \ + --hash=sha256:6687ef6d0a6497e2b58e7c5b852b53f62142cfa7cd1555795758934da363a965 \ + --hash=sha256:6f8ba7f0328b79f08bdacc3e4e66fb4d7aab0c3584e0bd41328dce5262e26b2e \ + --hash=sha256:706843b48f9a3f9b9911979761c91541e3d90db1ca905fd63fee540a217698bc \ + --hash=sha256:807ce09d4434881ca3a7594733669bd834f5b2c6d5c7e36f8c00f691887042ad \ + --hash=sha256:83e17b26de248c33f3acffb922748151d71827d6021d98c70e6c1a25ddd78505 \ + --hash=sha256:96f1157a7c08b5b189b16b47bc9db2332269d6680a196341bf30046330d15388 \ + --hash=sha256:aec5a6c9864be7df2240c382740fcf3b96928c46604eaa7f3091f58b878c0bb6 \ + 
--hash=sha256:b0afd054cd42f3d213bf82c629efb1ee5f22eba35bf0eec88ea9ea7304f511a2 \ + --hash=sha256:ced4e447ae29ca194449a3f1ce132ded8fcab06971ef5f618605aacaa612beac \ + --hash=sha256:d1f6198ee6d9148405e49887803907fe8962a23e6c6f83ea7d98f1c0de375695 \ + --hash=sha256:e124352fd3db36a9d4a21c1aa27fd5d051e621845cb87fb851c08f4f75ce8be6 \ + --hash=sha256:e422abdec8b5fa8462aa016786680720d78bdce7a30c652b7fadf83a4ba35336 \ + --hash=sha256:ef8b72fa70b348724ff1218267e7f7375b8de4e8194d1636ee60510aae104cd0 \ + --hash=sha256:f0c64d1bd842ca2633e74a1a28033d139368ad959872533b1bab8c80e8240a0c \ + --hash=sha256:f24077a3b5298a5a06a8e0536e3ea9ec60e4c7ac486755e5fb6e6ea9b3500106 \ + --hash=sha256:fdd188c8a6ef8769f148f88f859884507b954cc64db6b52f66ef199bb9ad660a \ + --hash=sha256:fe913f20024eb2cb2f323e42a64bdf2911bb9738a15dba7d3cce48151034e3a8 # via # gcp-releasetool # secretstorage From 9f6ead1a5f2fe4da357f3d32a21a7a17b0a89fcd Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 27 Feb 2023 11:55:26 -0500 Subject: [PATCH 101/210] chore(python): upgrade gcp-releasetool in .kokoro [autoapprove] (#173) Source-Link: https://github.com/googleapis/synthtool/commit/5f2a6089f73abf06238fe4310f6a14d6f6d1eed3 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:8555f0e37e6261408f792bfd6635102d2da5ad73f8f09bcb24f25e6afb5fac97 Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 2 +- packages/db-dtypes/.kokoro/requirements.in | 2 +- packages/db-dtypes/.kokoro/requirements.txt | 6 +++--- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 894fb6bc9b47..5fc5daa31783 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:f62c53736eccb0c4934a3ea9316e0d57696bb49c1a7c86c726e9bb8a2f87dadf + digest: sha256:8555f0e37e6261408f792bfd6635102d2da5ad73f8f09bcb24f25e6afb5fac97 diff --git a/packages/db-dtypes/.kokoro/requirements.in b/packages/db-dtypes/.kokoro/requirements.in index cbd7e77f44db..882178ce6001 100644 --- a/packages/db-dtypes/.kokoro/requirements.in +++ b/packages/db-dtypes/.kokoro/requirements.in @@ -1,5 +1,5 @@ gcp-docuploader -gcp-releasetool +gcp-releasetool>=1.10.5 # required for compatibility with cryptography>=39.x importlib-metadata typing-extensions twine diff --git a/packages/db-dtypes/.kokoro/requirements.txt b/packages/db-dtypes/.kokoro/requirements.txt index 096e4800a9ac..fa99c12908f0 100644 --- a/packages/db-dtypes/.kokoro/requirements.txt +++ b/packages/db-dtypes/.kokoro/requirements.txt @@ -154,9 +154,9 @@ gcp-docuploader==0.6.4 \ --hash=sha256:01486419e24633af78fd0167db74a2763974765ee8078ca6eb6964d0ebd388af \ --hash=sha256:70861190c123d907b3b067da896265ead2eeb9263969d6955c9e0bb091b5ccbf # via -r requirements.in -gcp-releasetool==1.10.0 \ - --hash=sha256:72a38ca91b59c24f7e699e9227c90cbe4dd71b789383cb0164b088abae294c83 \ - --hash=sha256:8c7c99320208383d4bb2b808c6880eb7a81424afe7cdba3c8d84b25f4f0e097d +gcp-releasetool==1.10.5 \ + --hash=sha256:174b7b102d704b254f2a26a3eda2c684fd3543320ec239baf771542a2e58e109 \ + --hash=sha256:e29d29927fe2ca493105a82958c6873bb2b90d503acac56be2c229e74de0eec9 # via -r requirements.in google-api-core==2.10.2 \ --hash=sha256:10c06f7739fe57781f87523375e8e1a3a4674bf6392cd6131a3222182b971320 \ From 9734240d67f8b1a44a4da6b3139153989101ad18 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Sat, 4 Mar 2023 11:27:59 +0000 Subject: [PATCH 102/210] chore(deps): update dependency pytest to v7.2.2 (#174) --- packages/db-dtypes/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/packages/db-dtypes/samples/snippets/requirements-test.txt b/packages/db-dtypes/samples/snippets/requirements-test.txt index 805eb2a9f845..c021c5b5b702 100644 --- a/packages/db-dtypes/samples/snippets/requirements-test.txt +++ b/packages/db-dtypes/samples/snippets/requirements-test.txt @@ -1 +1 @@ -pytest==7.2.1 +pytest==7.2.2 From e2875d0da57a3e41a7cd99f33242de96740aa401 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 16 Mar 2023 08:25:00 -0400 Subject: [PATCH 103/210] chore(deps): Update nox in .kokoro/requirements.in [autoapprove] (#177) Source-Link: https://github.com/googleapis/synthtool/commit/92006bb3cdc84677aa93c7f5235424ec2b157146 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:2e247c7bf5154df7f98cce087a20ca7605e236340c7d6d1a14447e5c06791bd6 Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 2 +- packages/db-dtypes/.kokoro/requirements.in | 2 +- packages/db-dtypes/.kokoro/requirements.txt | 14 +++++--------- 3 files changed, 7 insertions(+), 11 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 5fc5daa31783..b8edda51cf46 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:8555f0e37e6261408f792bfd6635102d2da5ad73f8f09bcb24f25e6afb5fac97 + digest: sha256:2e247c7bf5154df7f98cce087a20ca7605e236340c7d6d1a14447e5c06791bd6 diff --git a/packages/db-dtypes/.kokoro/requirements.in b/packages/db-dtypes/.kokoro/requirements.in index 882178ce6001..ec867d9fd65a 100644 --- a/packages/db-dtypes/.kokoro/requirements.in +++ b/packages/db-dtypes/.kokoro/requirements.in @@ -5,6 +5,6 @@ typing-extensions twine wheel setuptools -nox +nox>=2022.11.21 # required to remove dependency on py charset-normalizer<3 click<8.1.0 diff --git a/packages/db-dtypes/.kokoro/requirements.txt b/packages/db-dtypes/.kokoro/requirements.txt index fa99c12908f0..66a2172a76a8 100644 --- a/packages/db-dtypes/.kokoro/requirements.txt +++ b/packages/db-dtypes/.kokoro/requirements.txt @@ -1,6 +1,6 @@ # -# This file is autogenerated by pip-compile with python 3.10 -# To update, run: +# This file is autogenerated by pip-compile with Python 3.9 +# by the following command: # # pip-compile --allow-unsafe --generate-hashes requirements.in # @@ -335,9 +335,9 @@ more-itertools==9.0.0 \ --hash=sha256:250e83d7e81d0c87ca6bd942e6aeab8cc9daa6096d12c5308f3f92fa5e5c1f41 \ --hash=sha256:5a6257e40878ef0520b1803990e3e22303a41b5714006c32a3fd8304b26ea1ab # via jaraco-classes -nox==2022.8.7 \ - --hash=sha256:1b894940551dc5c389f9271d197ca5d655d40bdc6ccf93ed6880e4042760a34b \ - --hash=sha256:96cca88779e08282a699d672258ec01eb7c792d35bbbf538c723172bce23212c +nox==2022.11.21 \ + --hash=sha256:0e41a990e290e274cb205a976c4c97ee3c5234441a8132c8c3fd9ea3c22149eb \ + --hash=sha256:e21c31de0711d1274ca585a2c5fde36b1aa962005ba8e9322bf5eeed16dcd684 # via -r requirements.in packaging==21.3 \ --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \ @@ -380,10 +380,6 @@ protobuf==3.20.3 \ # gcp-docuploader # gcp-releasetool # google-api-core -py==1.11.0 \ - 
--hash=sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719 \ - --hash=sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378 - # via nox pyasn1==0.4.8 \ --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \ --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba From cc0fcb2b46dfac3a036ae3d57ab9ef3197967a54 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Wed, 29 Mar 2023 13:37:44 -0500 Subject: [PATCH 104/210] feat: support pandas 2.0 release candidate (#179) --- packages/db-dtypes/setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/db-dtypes/setup.py b/packages/db-dtypes/setup.py index ba6388fb920c..cb24515d20ae 100644 --- a/packages/db-dtypes/setup.py +++ b/packages/db-dtypes/setup.py @@ -30,9 +30,9 @@ dependencies = [ "packaging >= 17.0", - "pandas >= 0.24.2, < 2.0dev", + "pandas >= 0.24.2", "pyarrow>=3.0.0", - "numpy >= 1.16.6, < 2.0dev", + "numpy >= 1.16.6", ] package_root = os.path.abspath(os.path.dirname(__file__)) From cd5be8c4e0fa6367f2f34caae286fa5c3cf76128 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 29 Mar 2023 13:49:15 -0500 Subject: [PATCH 105/210] chore(main): release 1.1.0 (#167) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/db-dtypes/CHANGELOG.md | 12 ++++++++++++ packages/db-dtypes/db_dtypes/version.py | 2 +- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/packages/db-dtypes/CHANGELOG.md b/packages/db-dtypes/CHANGELOG.md index bc43cdedef97..9b748711044f 100644 --- a/packages/db-dtypes/CHANGELOG.md +++ b/packages/db-dtypes/CHANGELOG.md @@ -1,5 +1,17 @@ # Changelog +## [1.1.0](https://github.com/googleapis/python-db-dtypes-pandas/compare/v1.0.5...v1.1.0) (2023-03-29) + + +### Features + +* Support pandas 2.0 release candidate 
([#179](https://github.com/googleapis/python-db-dtypes-pandas/issues/179)) ([daa6852](https://github.com/googleapis/python-db-dtypes-pandas/commit/daa685234d283bc2f3c87a6127fd734d8a037ad6)) + + +### Bug Fixes + +* Adds bounds checking because pandas now handles microsecond reso… ([#166](https://github.com/googleapis/python-db-dtypes-pandas/issues/166)) ([357a315](https://github.com/googleapis/python-db-dtypes-pandas/commit/357a3156a3eb37eede2edb7fc84e93fe32967f11)) + ## [1.0.5](https://github.com/googleapis/python-db-dtypes-pandas/compare/v1.0.4...v1.0.5) (2022-12-05) diff --git a/packages/db-dtypes/db_dtypes/version.py b/packages/db-dtypes/db_dtypes/version.py index 8794e1f45403..acbb30a9dcd2 100644 --- a/packages/db-dtypes/db_dtypes/version.py +++ b/packages/db-dtypes/db_dtypes/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "1.0.5" +__version__ = "1.1.0" From 12f3e4705c432097c0a8e81e3af5a766235a2ad8 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Thu, 30 Mar 2023 13:32:56 -0500 Subject: [PATCH 106/210] fix: out-of-bounds datetime.date raises OutOfBoundsDatetime (#180) --- packages/db-dtypes/db_dtypes/__init__.py | 21 +++++++++++---------- packages/db-dtypes/tests/unit/test_date.py | 2 ++ 2 files changed, 13 insertions(+), 10 deletions(-) diff --git a/packages/db-dtypes/db_dtypes/__init__.py b/packages/db-dtypes/db_dtypes/__init__.py index 3ecefedf814a..54721a3e7bd1 100644 --- a/packages/db-dtypes/db_dtypes/__init__.py +++ b/packages/db-dtypes/db_dtypes/__init__.py @@ -246,19 +246,18 @@ def _datetime( scalar, match_fn=re.compile(r"\s*(?P\d+)-(?P\d+)-(?P\d+)\s*$").match, ) -> Optional[numpy.datetime64]: - if isinstance(scalar, numpy.datetime64): - return scalar - # Convert pyarrow values to datetime.date. 
if isinstance(scalar, (pyarrow.Date32Scalar, pyarrow.Date64Scalar)): scalar = scalar.as_py() if pandas.isna(scalar): return numpy.datetime64("NaT") + elif isinstance(scalar, numpy.datetime64): + dateObj = pandas.Timestamp(scalar) elif isinstance(scalar, datetime.date): - return pandas.Timestamp( + dateObj = pandas.Timestamp( year=scalar.year, month=scalar.month, day=scalar.day - ).to_datetime64() + ) elif isinstance(scalar, str): match = match_fn(scalar) if not match: @@ -272,14 +271,16 @@ def _datetime( month=month, day=day, ) - if pandas.Timestamp.min < dateObj < pandas.Timestamp.max: - return dateObj.to_datetime64() - else: # pragma: NO COVER - # TODO(#166): Include these lines in coverage when pandas 2.0 is released. - raise OutOfBoundsDatetime("Out of bounds", scalar) # pragma: NO COVER else: raise TypeError("Invalid value type", scalar) + # TODO(#64): Support larger ranges with other units. + if pandas.Timestamp.min < dateObj < pandas.Timestamp.max: + return dateObj.to_datetime64() + else: # pragma: NO COVER + # TODO(#166): Include these lines in coverage when pandas 2.0 is released. 
+ raise OutOfBoundsDatetime("Out of bounds", scalar) # pragma: NO COVER + def _box_func(self, x): if pandas.isna(x): return pandas.NaT diff --git a/packages/db-dtypes/tests/unit/test_date.py b/packages/db-dtypes/tests/unit/test_date.py index 5bd081213f89..fddf1a0a22a5 100644 --- a/packages/db-dtypes/tests/unit/test_date.py +++ b/packages/db-dtypes/tests/unit/test_date.py @@ -159,6 +159,8 @@ def test_date_parsing_errors(value, error): ("9999-12-31", "Out of bounds"), ("1677-09-21", "Out of bounds"), ("2262-04-12", "Out of bounds"), + (datetime.date(1, 1, 1), "Out of bounds"), + (datetime.date(9999, 12, 31), "Out of bounds"), ], ) def test_date_parsing_errors_out_of_bounds(value, error): From 2af717ebf2904a9114cfdcd4871a8af93c78d6a3 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 30 Mar 2023 15:17:33 -0500 Subject: [PATCH 107/210] chore(main): release 1.1.1 (#181) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/db-dtypes/CHANGELOG.md | 7 +++++++ packages/db-dtypes/db_dtypes/version.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/db-dtypes/CHANGELOG.md b/packages/db-dtypes/CHANGELOG.md index 9b748711044f..431801f42ed1 100644 --- a/packages/db-dtypes/CHANGELOG.md +++ b/packages/db-dtypes/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.1.1](https://github.com/googleapis/python-db-dtypes-pandas/compare/v1.1.0...v1.1.1) (2023-03-30) + + +### Bug Fixes + +* Out-of-bounds datetime.date raises OutOfBoundsDatetime ([#180](https://github.com/googleapis/python-db-dtypes-pandas/issues/180)) ([4f3399e](https://github.com/googleapis/python-db-dtypes-pandas/commit/4f3399e3103c8ad8063b047c7718bcb5621038ca)) + ## [1.1.0](https://github.com/googleapis/python-db-dtypes-pandas/compare/v1.0.5...v1.1.0) (2023-03-29) diff --git a/packages/db-dtypes/db_dtypes/version.py b/packages/db-dtypes/db_dtypes/version.py index 
acbb30a9dcd2..7494067f71b8 100644 --- a/packages/db-dtypes/db_dtypes/version.py +++ b/packages/db-dtypes/db_dtypes/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "1.1.0" +__version__ = "1.1.1" From 80eddfb2688e8152fe648860ff351a1f72fb8e23 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 18 Apr 2023 18:11:38 +0200 Subject: [PATCH 108/210] chore(deps): update dependency pytest to v7.3.1 (#184) --- packages/db-dtypes/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/db-dtypes/samples/snippets/requirements-test.txt b/packages/db-dtypes/samples/snippets/requirements-test.txt index c021c5b5b702..c4d04a08d024 100644 --- a/packages/db-dtypes/samples/snippets/requirements-test.txt +++ b/packages/db-dtypes/samples/snippets/requirements-test.txt @@ -1 +1 @@ -pytest==7.2.2 +pytest==7.3.1 From 398bfed011ffb224b491ab5b9d07c564bdcd229a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 25 May 2023 12:40:23 -0400 Subject: [PATCH 109/210] build(deps): bump requests from 2.28.1 to 2.31.0 in /synthtool/gcp/templates/python_library/.kokoro (#186) Source-Link: https://github.com/googleapis/synthtool/commit/30bd01b4ab78bf1b2a425816e15b3e7e090993dd Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:9bc5fa3b62b091f60614c08a7fb4fd1d3e1678e326f34dd66ce1eefb5dc3267b Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 3 ++- packages/db-dtypes/.kokoro/requirements.txt | 6 +++--- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index b8edda51cf46..32b3c486591a 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,4 +13,5 @@ # limitations under the 
License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:2e247c7bf5154df7f98cce087a20ca7605e236340c7d6d1a14447e5c06791bd6 + digest: sha256:9bc5fa3b62b091f60614c08a7fb4fd1d3e1678e326f34dd66ce1eefb5dc3267b +# created: 2023-05-25T14:56:16.294623272Z diff --git a/packages/db-dtypes/.kokoro/requirements.txt b/packages/db-dtypes/.kokoro/requirements.txt index 66a2172a76a8..3b8d7ee81848 100644 --- a/packages/db-dtypes/.kokoro/requirements.txt +++ b/packages/db-dtypes/.kokoro/requirements.txt @@ -419,9 +419,9 @@ readme-renderer==37.3 \ --hash=sha256:cd653186dfc73055656f090f227f5cb22a046d7f71a841dfa305f55c9a513273 \ --hash=sha256:f67a16caedfa71eef48a31b39708637a6f4664c4394801a7b0d6432d13907343 # via twine -requests==2.28.1 \ - --hash=sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983 \ - --hash=sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349 +requests==2.31.0 \ + --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \ + --hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1 # via # gcp-releasetool # google-api-core From 1a54a4b2c691e6a2e3bafe8f3b83633ecbc23852 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 3 Jun 2023 19:14:41 -0400 Subject: [PATCH 110/210] build(deps): bump cryptography from 39.0.1 to 41.0.0 in /synthtool/gcp/templates/python_library/.kokoro (#187) Source-Link: https://github.com/googleapis/synthtool/commit/d0f51a0c2a9a6bcca86911eabea9e484baadf64b Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:240b5bcc2bafd450912d2da2be15e62bc6de2cf839823ae4bf94d4f392b451dc Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 4 +- packages/db-dtypes/.kokoro/requirements.txt | 42 ++++++++++---------- 2 files changed, 22 insertions(+), 24 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml 
b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 32b3c486591a..02a4dedced74 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:9bc5fa3b62b091f60614c08a7fb4fd1d3e1678e326f34dd66ce1eefb5dc3267b -# created: 2023-05-25T14:56:16.294623272Z + digest: sha256:240b5bcc2bafd450912d2da2be15e62bc6de2cf839823ae4bf94d4f392b451dc +# created: 2023-06-03T21:25:37.968717478Z diff --git a/packages/db-dtypes/.kokoro/requirements.txt b/packages/db-dtypes/.kokoro/requirements.txt index 3b8d7ee81848..c7929db6d152 100644 --- a/packages/db-dtypes/.kokoro/requirements.txt +++ b/packages/db-dtypes/.kokoro/requirements.txt @@ -113,28 +113,26 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==39.0.1 \ - --hash=sha256:0f8da300b5c8af9f98111ffd512910bc792b4c77392a9523624680f7956a99d4 \ - --hash=sha256:35f7c7d015d474f4011e859e93e789c87d21f6f4880ebdc29896a60403328f1f \ - --hash=sha256:5aa67414fcdfa22cf052e640cb5ddc461924a045cacf325cd164e65312d99502 \ - --hash=sha256:5d2d8b87a490bfcd407ed9d49093793d0f75198a35e6eb1a923ce1ee86c62b41 \ - --hash=sha256:6687ef6d0a6497e2b58e7c5b852b53f62142cfa7cd1555795758934da363a965 \ - --hash=sha256:6f8ba7f0328b79f08bdacc3e4e66fb4d7aab0c3584e0bd41328dce5262e26b2e \ - --hash=sha256:706843b48f9a3f9b9911979761c91541e3d90db1ca905fd63fee540a217698bc \ - --hash=sha256:807ce09d4434881ca3a7594733669bd834f5b2c6d5c7e36f8c00f691887042ad \ - --hash=sha256:83e17b26de248c33f3acffb922748151d71827d6021d98c70e6c1a25ddd78505 \ - --hash=sha256:96f1157a7c08b5b189b16b47bc9db2332269d6680a196341bf30046330d15388 \ - --hash=sha256:aec5a6c9864be7df2240c382740fcf3b96928c46604eaa7f3091f58b878c0bb6 \ - 
--hash=sha256:b0afd054cd42f3d213bf82c629efb1ee5f22eba35bf0eec88ea9ea7304f511a2 \ - --hash=sha256:ced4e447ae29ca194449a3f1ce132ded8fcab06971ef5f618605aacaa612beac \ - --hash=sha256:d1f6198ee6d9148405e49887803907fe8962a23e6c6f83ea7d98f1c0de375695 \ - --hash=sha256:e124352fd3db36a9d4a21c1aa27fd5d051e621845cb87fb851c08f4f75ce8be6 \ - --hash=sha256:e422abdec8b5fa8462aa016786680720d78bdce7a30c652b7fadf83a4ba35336 \ - --hash=sha256:ef8b72fa70b348724ff1218267e7f7375b8de4e8194d1636ee60510aae104cd0 \ - --hash=sha256:f0c64d1bd842ca2633e74a1a28033d139368ad959872533b1bab8c80e8240a0c \ - --hash=sha256:f24077a3b5298a5a06a8e0536e3ea9ec60e4c7ac486755e5fb6e6ea9b3500106 \ - --hash=sha256:fdd188c8a6ef8769f148f88f859884507b954cc64db6b52f66ef199bb9ad660a \ - --hash=sha256:fe913f20024eb2cb2f323e42a64bdf2911bb9738a15dba7d3cce48151034e3a8 +cryptography==41.0.0 \ + --hash=sha256:0ddaee209d1cf1f180f1efa338a68c4621154de0afaef92b89486f5f96047c55 \ + --hash=sha256:14754bcdae909d66ff24b7b5f166d69340ccc6cb15731670435efd5719294895 \ + --hash=sha256:344c6de9f8bda3c425b3a41b319522ba3208551b70c2ae00099c205f0d9fd3be \ + --hash=sha256:34d405ea69a8b34566ba3dfb0521379b210ea5d560fafedf9f800a9a94a41928 \ + --hash=sha256:3680248309d340fda9611498a5319b0193a8dbdb73586a1acf8109d06f25b92d \ + --hash=sha256:3c5ef25d060c80d6d9f7f9892e1d41bb1c79b78ce74805b8cb4aa373cb7d5ec8 \ + --hash=sha256:4ab14d567f7bbe7f1cdff1c53d5324ed4d3fc8bd17c481b395db224fb405c237 \ + --hash=sha256:5c1f7293c31ebc72163a9a0df246f890d65f66b4a40d9ec80081969ba8c78cc9 \ + --hash=sha256:6b71f64beeea341c9b4f963b48ee3b62d62d57ba93eb120e1196b31dc1025e78 \ + --hash=sha256:7d92f0248d38faa411d17f4107fc0bce0c42cae0b0ba5415505df72d751bf62d \ + --hash=sha256:8362565b3835ceacf4dc8f3b56471a2289cf51ac80946f9087e66dc283a810e0 \ + --hash=sha256:84a165379cb9d411d58ed739e4af3396e544eac190805a54ba2e0322feb55c46 \ + --hash=sha256:88ff107f211ea696455ea8d911389f6d2b276aabf3231bf72c8853d22db755c5 \ + 
--hash=sha256:9f65e842cb02550fac96536edb1d17f24c0a338fd84eaf582be25926e993dde4 \ + --hash=sha256:a4fc68d1c5b951cfb72dfd54702afdbbf0fb7acdc9b7dc4301bbf2225a27714d \ + --hash=sha256:b7f2f5c525a642cecad24ee8670443ba27ac1fab81bba4cc24c7b6b41f2d0c75 \ + --hash=sha256:b846d59a8d5a9ba87e2c3d757ca019fa576793e8758174d3868aecb88d6fc8eb \ + --hash=sha256:bf8fc66012ca857d62f6a347007e166ed59c0bc150cefa49f28376ebe7d992a2 \ + --hash=sha256:f5d0bf9b252f30a31664b6f64432b4730bb7038339bd18b1fafe129cfc2be9be # via # gcp-releasetool # secretstorage From eb94270e9af9620d3fa801a8f1e75cf3e47fad01 Mon Sep 17 00:00:00 2001 From: Chalmer Lowe Date: Mon, 12 Jun 2023 14:36:57 -0400 Subject: [PATCH 111/210] fix: adds xfail marks to tests that are known to fail (#189) * fix: adds xfail marks to tests that are known to fail * Update tests/compliance/date/test_date_compliance.py * Update tests/compliance/date/test_date_compliance.py --- .../tests/compliance/date/test_date_compliance.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/packages/db-dtypes/tests/compliance/date/test_date_compliance.py b/packages/db-dtypes/tests/compliance/date/test_date_compliance.py index e19caf70491d..59c09689dbc0 100644 --- a/packages/db-dtypes/tests/compliance/date/test_date_compliance.py +++ b/packages/db-dtypes/tests/compliance/date/test_date_compliance.py @@ -90,6 +90,18 @@ def test_value_counts(self, all_data, dropna): self.assert_series_equal(result, expected) + def test_diff(self): + pytest.xfail( + reason="""Causes a breakage in the compliance test suite. Needs + further investigation. See issues 182, 183, 185.""" + ) + + def test_hash_pandas_object(self): + pytest.xfail( + reason="""Causes a breakage in the compliance test suite. Needs + further investigation. 
See issues 182, 183, 185.""" + ) + class TestParsing(base.BaseParsingTests): pass From 1d2b27b49c1678d1f167b61735be3c55ced5a5d4 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 13 Jun 2023 16:51:23 +0200 Subject: [PATCH 112/210] chore(deps): update dependency pytest to v7.3.2 (#188) Co-authored-by: Anthonios Partheniou --- packages/db-dtypes/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/db-dtypes/samples/snippets/requirements-test.txt b/packages/db-dtypes/samples/snippets/requirements-test.txt index c4d04a08d024..56628493b8ce 100644 --- a/packages/db-dtypes/samples/snippets/requirements-test.txt +++ b/packages/db-dtypes/samples/snippets/requirements-test.txt @@ -1 +1 @@ -pytest==7.3.1 +pytest==7.3.2 From 3278bd42648761dd0534cf68ceac12a35b854c40 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 27 Jun 2023 10:14:01 -0400 Subject: [PATCH 113/210] chore: remove pinned Sphinx version [autoapprove] (#192) Source-Link: https://github.com/googleapis/synthtool/commit/909573ce9da2819eeb835909c795d29aea5c724e Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:ddf4551385d566771dc713090feb7b4c1164fb8a698fe52bbe7670b24236565b Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 4 ++-- packages/db-dtypes/noxfile.py | 3 +-- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 02a4dedced74..1b3cb6c52663 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:240b5bcc2bafd450912d2da2be15e62bc6de2cf839823ae4bf94d4f392b451dc -# created: 2023-06-03T21:25:37.968717478Z + digest: sha256:ddf4551385d566771dc713090feb7b4c1164fb8a698fe52bbe7670b24236565b +# created: 2023-06-27T13:04:21.96690344Z diff --git a/packages/db-dtypes/noxfile.py b/packages/db-dtypes/noxfile.py index 6158b2523647..e4a134711dbb 100644 --- a/packages/db-dtypes/noxfile.py +++ b/packages/db-dtypes/noxfile.py @@ -401,10 +401,9 @@ def docfx(session): session.install("-e", ".") session.install( - "sphinx==4.0.1", + "gcp-sphinx-docfx-yaml", "alabaster", "recommonmark", - "gcp-sphinx-docfx-yaml", ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) From 583f2b73a92628128d6ee90039f6e856b421df51 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 29 Jun 2023 12:10:45 -0400 Subject: [PATCH 114/210] chore: store artifacts in placer (#193) Source-Link: https://github.com/googleapis/synthtool/commit/cb960373d12d20f8dc38beee2bf884d49627165e Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:2d816f26f728ac8b24248741e7d4c461c09764ef9f7be3684d557c9632e46dbd Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 4 ++-- packages/db-dtypes/.kokoro/release/common.cfg | 9 +++++++++ packages/db-dtypes/noxfile.py | 2 +- 3 files changed, 12 insertions(+), 3 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 1b3cb6c52663..98994f474104 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:ddf4551385d566771dc713090feb7b4c1164fb8a698fe52bbe7670b24236565b -# created: 2023-06-27T13:04:21.96690344Z + digest: sha256:2d816f26f728ac8b24248741e7d4c461c09764ef9f7be3684d557c9632e46dbd +# created: 2023-06-28T17:03:33.371210701Z diff --git a/packages/db-dtypes/.kokoro/release/common.cfg b/packages/db-dtypes/.kokoro/release/common.cfg index 44cc38429040..f3e607c295d2 100644 --- a/packages/db-dtypes/.kokoro/release/common.cfg +++ b/packages/db-dtypes/.kokoro/release/common.cfg @@ -38,3 +38,12 @@ env_vars: { key: "SECRET_MANAGER_KEYS" value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" } + +# Store the packages we uploaded to PyPI. That way, we have a record of exactly +# what we published, which we can use to generate SBOMs and attestations. +action { + define_artifacts { + regex: "github/python-db-dtypes-pandas/**/*.tar.gz" + strip_prefix: "github/python-db-dtypes-pandas" + } +} diff --git a/packages/db-dtypes/noxfile.py b/packages/db-dtypes/noxfile.py index e4a134711dbb..a304a94f2aab 100644 --- a/packages/db-dtypes/noxfile.py +++ b/packages/db-dtypes/noxfile.py @@ -475,6 +475,7 @@ def prerelease_deps(session): "grpcio!=1.52.0rc1", "grpcio-status", "google-api-core", + "google-auth", "proto-plus", "google-cloud-testutils", # dependencies of google-cloud-testutils" @@ -487,7 +488,6 @@ def prerelease_deps(session): # Remaining dependencies other_deps = [ "requests", - "google-auth", ] session.install(*other_deps) From 3c3d38a91b34878a4a82ed8c8ed0f8ea18defb0d Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 5 Jul 2023 17:25:02 +0200 Subject: [PATCH 115/210] chore(deps): update dependency pytest to v7.4.0 (#191) Co-authored-by: Anthonios Partheniou --- packages/db-dtypes/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/packages/db-dtypes/samples/snippets/requirements-test.txt b/packages/db-dtypes/samples/snippets/requirements-test.txt index 56628493b8ce..70613be0cfe4 100644 --- a/packages/db-dtypes/samples/snippets/requirements-test.txt +++ b/packages/db-dtypes/samples/snippets/requirements-test.txt @@ -1 +1 @@ -pytest==7.3.2 +pytest==7.4.0 From 51176cce18309784a494ad9e8ea523ce1aa8b04a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 17 Jul 2023 11:46:34 -0400 Subject: [PATCH 116/210] build(deps): [autoapprove] bump cryptography from 41.0.0 to 41.0.2 (#194) Source-Link: https://github.com/googleapis/synthtool/commit/d6103f4a3540ba60f633a9e25c37ec5fe7e6286d Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:39f0f3f2be02ef036e297e376fe3b6256775576da8a6ccb1d5eeb80f4c8bf8fb Co-authored-by: Owl Bot --- packages/db-dtypes/.coveragerc | 2 +- packages/db-dtypes/.flake8 | 2 +- packages/db-dtypes/.github/.OwlBot.lock.yaml | 4 +- packages/db-dtypes/.github/auto-label.yaml | 2 +- packages/db-dtypes/.kokoro/build.sh | 2 +- .../db-dtypes/.kokoro/docker/docs/Dockerfile | 2 +- .../db-dtypes/.kokoro/populate-secrets.sh | 2 +- packages/db-dtypes/.kokoro/publish-docs.sh | 2 +- packages/db-dtypes/.kokoro/release.sh | 2 +- packages/db-dtypes/.kokoro/requirements.txt | 44 ++++++++++--------- .../.kokoro/test-samples-against-head.sh | 2 +- .../db-dtypes/.kokoro/test-samples-impl.sh | 2 +- packages/db-dtypes/.kokoro/test-samples.sh | 2 +- packages/db-dtypes/.kokoro/trampoline.sh | 2 +- packages/db-dtypes/.kokoro/trampoline_v2.sh | 2 +- packages/db-dtypes/.pre-commit-config.yaml | 2 +- packages/db-dtypes/.trampolinerc | 4 +- packages/db-dtypes/MANIFEST.in | 2 +- packages/db-dtypes/docs/conf.py | 2 +- packages/db-dtypes/noxfile.py | 3 +- packages/db-dtypes/scripts/decrypt-secrets.sh | 2 +- .../scripts/readme-gen/readme_gen.py | 18 ++++---- packages/db-dtypes/setup.cfg | 2 +- 23 files changed, 56 insertions(+), 
53 deletions(-) diff --git a/packages/db-dtypes/.coveragerc b/packages/db-dtypes/.coveragerc index 0f8f9058e323..24e6fe932bed 100644 --- a/packages/db-dtypes/.coveragerc +++ b/packages/db-dtypes/.coveragerc @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/db-dtypes/.flake8 b/packages/db-dtypes/.flake8 index 2e438749863d..87f6e408c47d 100644 --- a/packages/db-dtypes/.flake8 +++ b/packages/db-dtypes/.flake8 @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 98994f474104..ae4a522b9e5f 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:2d816f26f728ac8b24248741e7d4c461c09764ef9f7be3684d557c9632e46dbd -# created: 2023-06-28T17:03:33.371210701Z + digest: sha256:39f0f3f2be02ef036e297e376fe3b6256775576da8a6ccb1d5eeb80f4c8bf8fb +# created: 2023-07-17T15:20:13.819193964Z diff --git a/packages/db-dtypes/.github/auto-label.yaml b/packages/db-dtypes/.github/auto-label.yaml index 41bff0b5375a..b2016d119b40 100644 --- a/packages/db-dtypes/.github/auto-label.yaml +++ b/packages/db-dtypes/.github/auto-label.yaml @@ -1,4 +1,4 @@ -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/db-dtypes/.kokoro/build.sh b/packages/db-dtypes/.kokoro/build.sh index 9f80c3478aa8..41c765dea85c 100755 --- a/packages/db-dtypes/.kokoro/build.sh +++ b/packages/db-dtypes/.kokoro/build.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2018 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/db-dtypes/.kokoro/docker/docs/Dockerfile b/packages/db-dtypes/.kokoro/docker/docs/Dockerfile index f8137d0ae497..8e39a2cc438d 100644 --- a/packages/db-dtypes/.kokoro/docker/docs/Dockerfile +++ b/packages/db-dtypes/.kokoro/docker/docs/Dockerfile @@ -1,4 +1,4 @@ -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/db-dtypes/.kokoro/populate-secrets.sh b/packages/db-dtypes/.kokoro/populate-secrets.sh index f52514257ef0..6f3972140e80 100755 --- a/packages/db-dtypes/.kokoro/populate-secrets.sh +++ b/packages/db-dtypes/.kokoro/populate-secrets.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2020 Google LLC. +# Copyright 2023 Google LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/db-dtypes/.kokoro/publish-docs.sh b/packages/db-dtypes/.kokoro/publish-docs.sh index 1c4d62370042..9eafe0be3bba 100755 --- a/packages/db-dtypes/.kokoro/publish-docs.sh +++ b/packages/db-dtypes/.kokoro/publish-docs.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/db-dtypes/.kokoro/release.sh b/packages/db-dtypes/.kokoro/release.sh index 2cbba13c8091..84a42d909c0c 100755 --- a/packages/db-dtypes/.kokoro/release.sh +++ b/packages/db-dtypes/.kokoro/release.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/db-dtypes/.kokoro/requirements.txt b/packages/db-dtypes/.kokoro/requirements.txt index c7929db6d152..67d70a110897 100644 --- a/packages/db-dtypes/.kokoro/requirements.txt +++ b/packages/db-dtypes/.kokoro/requirements.txt @@ -113,26 +113,30 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==41.0.0 \ - --hash=sha256:0ddaee209d1cf1f180f1efa338a68c4621154de0afaef92b89486f5f96047c55 \ - --hash=sha256:14754bcdae909d66ff24b7b5f166d69340ccc6cb15731670435efd5719294895 \ - --hash=sha256:344c6de9f8bda3c425b3a41b319522ba3208551b70c2ae00099c205f0d9fd3be \ - --hash=sha256:34d405ea69a8b34566ba3dfb0521379b210ea5d560fafedf9f800a9a94a41928 \ - --hash=sha256:3680248309d340fda9611498a5319b0193a8dbdb73586a1acf8109d06f25b92d \ - --hash=sha256:3c5ef25d060c80d6d9f7f9892e1d41bb1c79b78ce74805b8cb4aa373cb7d5ec8 \ - --hash=sha256:4ab14d567f7bbe7f1cdff1c53d5324ed4d3fc8bd17c481b395db224fb405c237 \ - --hash=sha256:5c1f7293c31ebc72163a9a0df246f890d65f66b4a40d9ec80081969ba8c78cc9 \ - --hash=sha256:6b71f64beeea341c9b4f963b48ee3b62d62d57ba93eb120e1196b31dc1025e78 \ - --hash=sha256:7d92f0248d38faa411d17f4107fc0bce0c42cae0b0ba5415505df72d751bf62d \ - --hash=sha256:8362565b3835ceacf4dc8f3b56471a2289cf51ac80946f9087e66dc283a810e0 \ - --hash=sha256:84a165379cb9d411d58ed739e4af3396e544eac190805a54ba2e0322feb55c46 \ - --hash=sha256:88ff107f211ea696455ea8d911389f6d2b276aabf3231bf72c8853d22db755c5 \ 
- --hash=sha256:9f65e842cb02550fac96536edb1d17f24c0a338fd84eaf582be25926e993dde4 \ - --hash=sha256:a4fc68d1c5b951cfb72dfd54702afdbbf0fb7acdc9b7dc4301bbf2225a27714d \ - --hash=sha256:b7f2f5c525a642cecad24ee8670443ba27ac1fab81bba4cc24c7b6b41f2d0c75 \ - --hash=sha256:b846d59a8d5a9ba87e2c3d757ca019fa576793e8758174d3868aecb88d6fc8eb \ - --hash=sha256:bf8fc66012ca857d62f6a347007e166ed59c0bc150cefa49f28376ebe7d992a2 \ - --hash=sha256:f5d0bf9b252f30a31664b6f64432b4730bb7038339bd18b1fafe129cfc2be9be +cryptography==41.0.2 \ + --hash=sha256:01f1d9e537f9a15b037d5d9ee442b8c22e3ae11ce65ea1f3316a41c78756b711 \ + --hash=sha256:079347de771f9282fbfe0e0236c716686950c19dee1b76240ab09ce1624d76d7 \ + --hash=sha256:182be4171f9332b6741ee818ec27daff9fb00349f706629f5cbf417bd50e66fd \ + --hash=sha256:192255f539d7a89f2102d07d7375b1e0a81f7478925b3bc2e0549ebf739dae0e \ + --hash=sha256:2a034bf7d9ca894720f2ec1d8b7b5832d7e363571828037f9e0c4f18c1b58a58 \ + --hash=sha256:342f3767e25876751e14f8459ad85e77e660537ca0a066e10e75df9c9e9099f0 \ + --hash=sha256:439c3cc4c0d42fa999b83ded80a9a1fb54d53c58d6e59234cfe97f241e6c781d \ + --hash=sha256:49c3222bb8f8e800aead2e376cbef687bc9e3cb9b58b29a261210456a7783d83 \ + --hash=sha256:674b669d5daa64206c38e507808aae49904c988fa0a71c935e7006a3e1e83831 \ + --hash=sha256:7a9a3bced53b7f09da251685224d6a260c3cb291768f54954e28f03ef14e3766 \ + --hash=sha256:7af244b012711a26196450d34f483357e42aeddb04128885d95a69bd8b14b69b \ + --hash=sha256:7d230bf856164de164ecb615ccc14c7fc6de6906ddd5b491f3af90d3514c925c \ + --hash=sha256:84609ade00a6ec59a89729e87a503c6e36af98ddcd566d5f3be52e29ba993182 \ + --hash=sha256:9a6673c1828db6270b76b22cc696f40cde9043eb90373da5c2f8f2158957f42f \ + --hash=sha256:9b6d717393dbae53d4e52684ef4f022444fc1cce3c48c38cb74fca29e1f08eaa \ + --hash=sha256:9c3fe6534d59d071ee82081ca3d71eed3210f76ebd0361798c74abc2bcf347d4 \ + --hash=sha256:a719399b99377b218dac6cf547b6ec54e6ef20207b6165126a280b0ce97e0d2a \ + 
--hash=sha256:b332cba64d99a70c1e0836902720887fb4529ea49ea7f5462cf6640e095e11d2 \ + --hash=sha256:d124682c7a23c9764e54ca9ab5b308b14b18eba02722b8659fb238546de83a76 \ + --hash=sha256:d73f419a56d74fef257955f51b18d046f3506270a5fd2ac5febbfa259d6c0fa5 \ + --hash=sha256:f0dc40e6f7aa37af01aba07277d3d64d5a03dc66d682097541ec4da03cc140ee \ + --hash=sha256:f14ad275364c8b4e525d018f6716537ae7b6d369c094805cae45300847e0894f \ + --hash=sha256:f772610fe364372de33d76edcd313636a25684edb94cee53fd790195f5989d14 # via # gcp-releasetool # secretstorage diff --git a/packages/db-dtypes/.kokoro/test-samples-against-head.sh b/packages/db-dtypes/.kokoro/test-samples-against-head.sh index ba3a707b040c..63ac41dfae1d 100755 --- a/packages/db-dtypes/.kokoro/test-samples-against-head.sh +++ b/packages/db-dtypes/.kokoro/test-samples-against-head.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/db-dtypes/.kokoro/test-samples-impl.sh b/packages/db-dtypes/.kokoro/test-samples-impl.sh index 2c6500cae0b9..5a0f5fab6a89 100755 --- a/packages/db-dtypes/.kokoro/test-samples-impl.sh +++ b/packages/db-dtypes/.kokoro/test-samples-impl.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2021 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/db-dtypes/.kokoro/test-samples.sh b/packages/db-dtypes/.kokoro/test-samples.sh index 11c042d342d7..50b35a48c190 100755 --- a/packages/db-dtypes/.kokoro/test-samples.sh +++ b/packages/db-dtypes/.kokoro/test-samples.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/db-dtypes/.kokoro/trampoline.sh b/packages/db-dtypes/.kokoro/trampoline.sh index f39236e943a8..d85b1f267693 100755 --- a/packages/db-dtypes/.kokoro/trampoline.sh +++ b/packages/db-dtypes/.kokoro/trampoline.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2017 Google Inc. +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/db-dtypes/.kokoro/trampoline_v2.sh b/packages/db-dtypes/.kokoro/trampoline_v2.sh index 4af6cdc26dbc..59a7cf3a9373 100755 --- a/packages/db-dtypes/.kokoro/trampoline_v2.sh +++ b/packages/db-dtypes/.kokoro/trampoline_v2.sh @@ -1,5 +1,5 @@ #!/usr/bin/env bash -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/db-dtypes/.pre-commit-config.yaml b/packages/db-dtypes/.pre-commit-config.yaml index 5405cc8ff1f3..9e3898fd1c12 100644 --- a/packages/db-dtypes/.pre-commit-config.yaml +++ b/packages/db-dtypes/.pre-commit-config.yaml @@ -1,4 +1,4 @@ -# Copyright 2021 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/db-dtypes/.trampolinerc b/packages/db-dtypes/.trampolinerc index 0eee72ab62aa..a7dfeb42c6d0 100644 --- a/packages/db-dtypes/.trampolinerc +++ b/packages/db-dtypes/.trampolinerc @@ -1,4 +1,4 @@ -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,8 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -# Template for .trampolinerc - # Add required env vars here. 
required_envvars+=( ) diff --git a/packages/db-dtypes/MANIFEST.in b/packages/db-dtypes/MANIFEST.in index e783f4c6209b..e0a66705318e 100644 --- a/packages/db-dtypes/MANIFEST.in +++ b/packages/db-dtypes/MANIFEST.in @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/db-dtypes/docs/conf.py b/packages/db-dtypes/docs/conf.py index 0565c618821c..ff2f01a39d07 100644 --- a/packages/db-dtypes/docs/conf.py +++ b/packages/db-dtypes/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2021 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/db-dtypes/noxfile.py b/packages/db-dtypes/noxfile.py index a304a94f2aab..74894b727caa 100644 --- a/packages/db-dtypes/noxfile.py +++ b/packages/db-dtypes/noxfile.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2018 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -496,6 +496,7 @@ def prerelease_deps(session): "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" ) session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") session.run("py.test", "tests/unit") diff --git a/packages/db-dtypes/scripts/decrypt-secrets.sh b/packages/db-dtypes/scripts/decrypt-secrets.sh index 21f6d2a26d90..0018b421ddf8 100755 --- a/packages/db-dtypes/scripts/decrypt-secrets.sh +++ b/packages/db-dtypes/scripts/decrypt-secrets.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Copyright 2015 Google Inc. All rights reserved. +# Copyright 2023 Google LLC All rights reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/db-dtypes/scripts/readme-gen/readme_gen.py b/packages/db-dtypes/scripts/readme-gen/readme_gen.py index 91b59676bfc7..1acc119835b5 100644 --- a/packages/db-dtypes/scripts/readme-gen/readme_gen.py +++ b/packages/db-dtypes/scripts/readme-gen/readme_gen.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# Copyright 2016 Google Inc +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -33,17 +33,17 @@ autoescape=True, ) -README_TMPL = jinja_env.get_template('README.tmpl.rst') +README_TMPL = jinja_env.get_template("README.tmpl.rst") def get_help(file): - return subprocess.check_output(['python', file, '--help']).decode() + return subprocess.check_output(["python", file, "--help"]).decode() def main(): parser = argparse.ArgumentParser() - parser.add_argument('source') - parser.add_argument('--destination', default='README.rst') + parser.add_argument("source") + parser.add_argument("--destination", default="README.rst") args = parser.parse_args() @@ -51,9 +51,9 @@ def main(): root = os.path.dirname(source) destination = os.path.join(root, args.destination) - jinja_env.globals['get_help'] = get_help + jinja_env.globals["get_help"] = get_help - with io.open(source, 'r') as f: + with io.open(source, "r") as f: config = yaml.load(f) # This allows get_help to execute in the right directory. 
@@ -61,9 +61,9 @@ def main(): output = README_TMPL.render(config) - with io.open(destination, 'w') as f: + with io.open(destination, "w") as f: f.write(output) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/packages/db-dtypes/setup.cfg b/packages/db-dtypes/setup.cfg index c3a2b39f6528..052350089505 100644 --- a/packages/db-dtypes/setup.cfg +++ b/packages/db-dtypes/setup.cfg @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. From 08f8eba7e1cf258e8f8c7d8296ec97c059cbb063 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 21 Jul 2023 09:44:51 -0400 Subject: [PATCH 117/210] build(deps): [autoapprove] bump pygments from 2.13.0 to 2.15.0 (#195) Source-Link: https://github.com/googleapis/synthtool/commit/eaef28efd179e6eeb9f4e9bf697530d074a6f3b9 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:f8ca7655fa8a449cadcabcbce4054f593dcbae7aeeab34aa3fcc8b5cf7a93c9e Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 4 ++-- packages/db-dtypes/.kokoro/requirements.txt | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index ae4a522b9e5f..17c21d96d654 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:39f0f3f2be02ef036e297e376fe3b6256775576da8a6ccb1d5eeb80f4c8bf8fb -# created: 2023-07-17T15:20:13.819193964Z + digest: sha256:f8ca7655fa8a449cadcabcbce4054f593dcbae7aeeab34aa3fcc8b5cf7a93c9e +# created: 2023-07-21T02:12:46.49799314Z diff --git a/packages/db-dtypes/.kokoro/requirements.txt b/packages/db-dtypes/.kokoro/requirements.txt index 67d70a110897..b563eb284459 100644 --- a/packages/db-dtypes/.kokoro/requirements.txt +++ b/packages/db-dtypes/.kokoro/requirements.txt @@ -396,9 +396,9 @@ pycparser==2.21 \ --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 # via cffi -pygments==2.13.0 \ - --hash=sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1 \ - --hash=sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42 +pygments==2.15.0 \ + --hash=sha256:77a3299119af881904cd5ecd1ac6a66214b6e9bed1f2db16993b54adede64094 \ + --hash=sha256:f7e36cffc4c517fbc252861b9a6e4644ca0e5abadf9a113c72d1358ad09b9500 # via # readme-renderer # rich From c687d81f560939efc75bf244f1ca6bdae0dad06d Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 26 Jul 2023 07:04:54 -0400 Subject: [PATCH 118/210] build(deps): [autoapprove] bump certifi from 2022.12.7 to 2023.7.22 (#196) Source-Link: https://github.com/googleapis/synthtool/commit/395d53adeeacfca00b73abf197f65f3c17c8f1e9 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:6c1cbc75c74b8bdd71dada2fa1677e9d6d78a889e9a70ee75b93d1d0543f96e1 Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 4 ++-- packages/db-dtypes/.kokoro/requirements.txt | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml 
index 17c21d96d654..0ddd0e4d1873 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:f8ca7655fa8a449cadcabcbce4054f593dcbae7aeeab34aa3fcc8b5cf7a93c9e -# created: 2023-07-21T02:12:46.49799314Z + digest: sha256:6c1cbc75c74b8bdd71dada2fa1677e9d6d78a889e9a70ee75b93d1d0543f96e1 +# created: 2023-07-25T21:01:10.396410762Z diff --git a/packages/db-dtypes/.kokoro/requirements.txt b/packages/db-dtypes/.kokoro/requirements.txt index b563eb284459..76d9bba0f7d0 100644 --- a/packages/db-dtypes/.kokoro/requirements.txt +++ b/packages/db-dtypes/.kokoro/requirements.txt @@ -20,9 +20,9 @@ cachetools==5.2.0 \ --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \ --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db # via google-auth -certifi==2022.12.7 \ - --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \ - --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18 +certifi==2023.7.22 \ + --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \ + --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9 # via requests cffi==1.15.1 \ --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ From ee65a6695bf6835afcd28a219bc59e5bfe506133 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 3 Aug 2023 12:27:45 -0400 Subject: [PATCH 119/210] build: [autoapprove] bump cryptography from 41.0.2 to 41.0.3 (#198) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * build: [autoapprove] bump cryptography from 41.0.2 to 41.0.3 Source-Link: https://github.com/googleapis/synthtool/commit/352b9d4c068ce7c05908172af128b294073bf53c 
Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:3e3800bb100af5d7f9e810d48212b37812c1856d20ffeafb99ebe66461b61fc7 * pin flake8 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * pin flake8 --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 4 +- packages/db-dtypes/.kokoro/requirements.txt | 48 ++++++++++---------- packages/db-dtypes/.pre-commit-config.yaml | 2 +- packages/db-dtypes/noxfile.py | 6 ++- packages/db-dtypes/owlbot.py | 8 ++++ 5 files changed, 40 insertions(+), 28 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 0ddd0e4d1873..a3da1b0d4cd3 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:6c1cbc75c74b8bdd71dada2fa1677e9d6d78a889e9a70ee75b93d1d0543f96e1 -# created: 2023-07-25T21:01:10.396410762Z + digest: sha256:3e3800bb100af5d7f9e810d48212b37812c1856d20ffeafb99ebe66461b61fc7 +# created: 2023-08-02T10:53:29.114535628Z diff --git a/packages/db-dtypes/.kokoro/requirements.txt b/packages/db-dtypes/.kokoro/requirements.txt index 76d9bba0f7d0..029bd342de94 100644 --- a/packages/db-dtypes/.kokoro/requirements.txt +++ b/packages/db-dtypes/.kokoro/requirements.txt @@ -113,30 +113,30 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==41.0.2 \ - --hash=sha256:01f1d9e537f9a15b037d5d9ee442b8c22e3ae11ce65ea1f3316a41c78756b711 \ - --hash=sha256:079347de771f9282fbfe0e0236c716686950c19dee1b76240ab09ce1624d76d7 \ - 
--hash=sha256:182be4171f9332b6741ee818ec27daff9fb00349f706629f5cbf417bd50e66fd \ - --hash=sha256:192255f539d7a89f2102d07d7375b1e0a81f7478925b3bc2e0549ebf739dae0e \ - --hash=sha256:2a034bf7d9ca894720f2ec1d8b7b5832d7e363571828037f9e0c4f18c1b58a58 \ - --hash=sha256:342f3767e25876751e14f8459ad85e77e660537ca0a066e10e75df9c9e9099f0 \ - --hash=sha256:439c3cc4c0d42fa999b83ded80a9a1fb54d53c58d6e59234cfe97f241e6c781d \ - --hash=sha256:49c3222bb8f8e800aead2e376cbef687bc9e3cb9b58b29a261210456a7783d83 \ - --hash=sha256:674b669d5daa64206c38e507808aae49904c988fa0a71c935e7006a3e1e83831 \ - --hash=sha256:7a9a3bced53b7f09da251685224d6a260c3cb291768f54954e28f03ef14e3766 \ - --hash=sha256:7af244b012711a26196450d34f483357e42aeddb04128885d95a69bd8b14b69b \ - --hash=sha256:7d230bf856164de164ecb615ccc14c7fc6de6906ddd5b491f3af90d3514c925c \ - --hash=sha256:84609ade00a6ec59a89729e87a503c6e36af98ddcd566d5f3be52e29ba993182 \ - --hash=sha256:9a6673c1828db6270b76b22cc696f40cde9043eb90373da5c2f8f2158957f42f \ - --hash=sha256:9b6d717393dbae53d4e52684ef4f022444fc1cce3c48c38cb74fca29e1f08eaa \ - --hash=sha256:9c3fe6534d59d071ee82081ca3d71eed3210f76ebd0361798c74abc2bcf347d4 \ - --hash=sha256:a719399b99377b218dac6cf547b6ec54e6ef20207b6165126a280b0ce97e0d2a \ - --hash=sha256:b332cba64d99a70c1e0836902720887fb4529ea49ea7f5462cf6640e095e11d2 \ - --hash=sha256:d124682c7a23c9764e54ca9ab5b308b14b18eba02722b8659fb238546de83a76 \ - --hash=sha256:d73f419a56d74fef257955f51b18d046f3506270a5fd2ac5febbfa259d6c0fa5 \ - --hash=sha256:f0dc40e6f7aa37af01aba07277d3d64d5a03dc66d682097541ec4da03cc140ee \ - --hash=sha256:f14ad275364c8b4e525d018f6716537ae7b6d369c094805cae45300847e0894f \ - --hash=sha256:f772610fe364372de33d76edcd313636a25684edb94cee53fd790195f5989d14 +cryptography==41.0.3 \ + --hash=sha256:0d09fb5356f975974dbcb595ad2d178305e5050656affb7890a1583f5e02a306 \ + --hash=sha256:23c2d778cf829f7d0ae180600b17e9fceea3c2ef8b31a99e3c694cbbf3a24b84 \ + 
--hash=sha256:3fb248989b6363906827284cd20cca63bb1a757e0a2864d4c1682a985e3dca47 \ + --hash=sha256:41d7aa7cdfded09b3d73a47f429c298e80796c8e825ddfadc84c8a7f12df212d \ + --hash=sha256:42cb413e01a5d36da9929baa9d70ca90d90b969269e5a12d39c1e0d475010116 \ + --hash=sha256:4c2f0d35703d61002a2bbdcf15548ebb701cfdd83cdc12471d2bae80878a4207 \ + --hash=sha256:4fd871184321100fb400d759ad0cddddf284c4b696568204d281c902fc7b0d81 \ + --hash=sha256:5259cb659aa43005eb55a0e4ff2c825ca111a0da1814202c64d28a985d33b087 \ + --hash=sha256:57a51b89f954f216a81c9d057bf1a24e2f36e764a1ca9a501a6964eb4a6800dd \ + --hash=sha256:652627a055cb52a84f8c448185922241dd5217443ca194d5739b44612c5e6507 \ + --hash=sha256:67e120e9a577c64fe1f611e53b30b3e69744e5910ff3b6e97e935aeb96005858 \ + --hash=sha256:6af1c6387c531cd364b72c28daa29232162010d952ceb7e5ca8e2827526aceae \ + --hash=sha256:6d192741113ef5e30d89dcb5b956ef4e1578f304708701b8b73d38e3e1461f34 \ + --hash=sha256:7efe8041897fe7a50863e51b77789b657a133c75c3b094e51b5e4b5cec7bf906 \ + --hash=sha256:84537453d57f55a50a5b6835622ee405816999a7113267739a1b4581f83535bd \ + --hash=sha256:8f09daa483aedea50d249ef98ed500569841d6498aa9c9f4b0531b9964658922 \ + --hash=sha256:95dd7f261bb76948b52a5330ba5202b91a26fbac13ad0e9fc8a3ac04752058c7 \ + --hash=sha256:a74fbcdb2a0d46fe00504f571a2a540532f4c188e6ccf26f1f178480117b33c4 \ + --hash=sha256:a983e441a00a9d57a4d7c91b3116a37ae602907a7618b882c8013b5762e80574 \ + --hash=sha256:ab8de0d091acbf778f74286f4989cf3d1528336af1b59f3e5d2ebca8b5fe49e1 \ + --hash=sha256:aeb57c421b34af8f9fe830e1955bf493a86a7996cc1338fe41b30047d16e962c \ + --hash=sha256:ce785cf81a7bdade534297ef9e490ddff800d956625020ab2ec2780a556c313e \ + --hash=sha256:d0d651aa754ef58d75cec6edfbd21259d93810b73f6ec246436a21b7841908de # via # gcp-releasetool # secretstorage diff --git a/packages/db-dtypes/.pre-commit-config.yaml b/packages/db-dtypes/.pre-commit-config.yaml index 9e3898fd1c12..19409cbd37a4 100644 --- a/packages/db-dtypes/.pre-commit-config.yaml +++ 
b/packages/db-dtypes/.pre-commit-config.yaml @@ -26,6 +26,6 @@ repos: hooks: - id: black - repo: https://github.com/pycqa/flake8 - rev: 3.9.2 + rev: 6.1.0 hooks: - id: flake8 diff --git a/packages/db-dtypes/noxfile.py b/packages/db-dtypes/noxfile.py index 74894b727caa..f338b6e43a65 100644 --- a/packages/db-dtypes/noxfile.py +++ b/packages/db-dtypes/noxfile.py @@ -26,6 +26,10 @@ import nox +# Pin flake8 to 6.0.0 +# See https://github.com/googleapis/python-db-dtypes-pandas/issues/199 +FLAKE8_VERSION = "flake8==6.0.0" + BLACK_VERSION = "black==22.3.0" ISORT_VERSION = "isort==5.10.1" LINT_PATHS = ["docs", "db_dtypes", "tests", "noxfile.py", "setup.py"] @@ -84,7 +88,7 @@ def lint(session): Returns a failure if the linters find linting errors or sufficiently serious code quality issues. """ - session.install("flake8", BLACK_VERSION) + session.install(FLAKE8_VERSION, BLACK_VERSION) session.run( "black", "--check", diff --git a/packages/db-dtypes/owlbot.py b/packages/db-dtypes/owlbot.py index 4b89096dfad5..edac6bf1c526 100644 --- a/packages/db-dtypes/owlbot.py +++ b/packages/db-dtypes/owlbot.py @@ -56,6 +56,14 @@ ["noxfile.py"], "--cov=google", "--cov=db_dtypes", ) +s.replace(["noxfile.py"], + """FLAKE8_VERSION = \"flake8==6.1.0\"""", + """# Pin flake8 to 6.0.0 +# See https://github.com/googleapis/python-db-dtypes-pandas/issues/199 +FLAKE8_VERSION = "flake8==6.0.0" +""" +) + # There are no system tests for this package. old_sessions = """ "unit", From 7ac6f6106e805d5c2a8c0abeed1a4bb48738c64a Mon Sep 17 00:00:00 2001 From: Chalmer Lowe Date: Thu, 31 Aug 2023 10:38:13 -0400 Subject: [PATCH 120/210] bug: add import and object reference due to upstream changes (#208) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Due to an upstream change in how a module was imported and referenced, we needed to update some of our compliance tests to match. 
Fixes #200 🦕 Fixes #201 🦕 Fixes #202 🦕 Fixes #203 🦕 Fixes #204 🦕 Fixes #205 🦕 Fixes #206 🦕 Fixes #207 🦕 --- .../db-dtypes/tests/compliance/date/test_date_compliance.py | 3 ++- .../db-dtypes/tests/compliance/time/test_time_compliance.py | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/packages/db-dtypes/tests/compliance/date/test_date_compliance.py b/packages/db-dtypes/tests/compliance/date/test_date_compliance.py index 59c09689dbc0..5c43287c8d9a 100644 --- a/packages/db-dtypes/tests/compliance/date/test_date_compliance.py +++ b/packages/db-dtypes/tests/compliance/date/test_date_compliance.py @@ -22,6 +22,7 @@ import pandas from pandas.tests.extension import base +import pandas._testing as tm import pytest import db_dtypes @@ -88,7 +89,7 @@ def test_value_counts(self, all_data, dropna): result = pandas.Series(all_data).value_counts(dropna=dropna).sort_index() expected = pandas.Series(other).value_counts(dropna=dropna).sort_index() - self.assert_series_equal(result, expected) + tm.assert_series_equal(result, expected) def test_diff(self): pytest.xfail( diff --git a/packages/db-dtypes/tests/compliance/time/test_time_compliance.py b/packages/db-dtypes/tests/compliance/time/test_time_compliance.py index ab1e050a03a4..3b36d164e22a 100644 --- a/packages/db-dtypes/tests/compliance/time/test_time_compliance.py +++ b/packages/db-dtypes/tests/compliance/time/test_time_compliance.py @@ -22,6 +22,7 @@ import pandas from pandas.tests.extension import base +import pandas._testing as tm import pytest import db_dtypes @@ -88,7 +89,7 @@ def test_value_counts(self, all_data, dropna): result = pandas.Series(all_data).value_counts(dropna=dropna).sort_index() expected = pandas.Series(other).value_counts(dropna=dropna).sort_index() - self.assert_series_equal(result, expected) + tm.assert_series_equal(result, expected) class TestParsing(base.BaseParsingTests): From 905ccc6cd5b974df6c62f186f038eeb0bdb9d29d Mon Sep 17 00:00:00 2001 From: Lingqing Gan Date: Wed, 20 Sep 
2023 08:14:16 -0400 Subject: [PATCH 121/210] docs: update pandas extension link (#210) --- packages/db-dtypes/README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/db-dtypes/README.rst b/packages/db-dtypes/README.rst index 4288be59ec25..abf1e8741259 100644 --- a/packages/db-dtypes/README.rst +++ b/packages/db-dtypes/README.rst @@ -13,7 +13,7 @@ Pandas Data Types for SQL systems (BigQuery, Spanner) :target: https://pypi.org/project/db-dtypes/ .. |versions| image:: https://img.shields.io/pypi/pyversions/db-dtypes.svg :target: https://pypi.org/project/db-dtypes/ -.. _Pandas extension data types: https://pandas.pydata.org/pandas-docs/stable/ecosystem.html#ecosystem-extensions +.. _Pandas extension data types: https://pandas.pydata.org/docs/development/extending.html#extension-types .. _BigQuery: https://cloud.google.com/bigquery/docs/ .. _Library Documentation: https://googleapis.dev/python/db-dtypes/latest From f6ddbe1c67eea675e899d0936d07f95d857d3319 Mon Sep 17 00:00:00 2001 From: Lingqing Gan Date: Thu, 21 Sep 2023 08:40:29 -0400 Subject: [PATCH 122/210] bug: fixes flake8==6.1.0 bug, unpins flake8==6.0.0 (#211) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * bug: fixes flake8==6.1.0 bug * fix errors * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- packages/db-dtypes/db_dtypes/core.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/packages/db-dtypes/db_dtypes/core.py b/packages/db-dtypes/db_dtypes/core.py index e9ab4add7148..567b931658c4 100644 --- a/packages/db-dtypes/db_dtypes/core.py +++ b/packages/db-dtypes/db_dtypes/core.py @@ -90,10 +90,12 @@ def astype(self, dtype, copy=True): def _cmp_method(self, other, op): """Compare array values, for use in OpsMixin.""" - if is_scalar(other) and (pandas.isna(other) or type(other) == self.dtype.type): + if 
is_scalar(other) and ( + pandas.isna(other) or isinstance(other, self.dtype.type) + ): other = type(self)([other]) - if type(other) != type(self): + if type(other) is not type(self): return NotImplemented oshape = getattr(other, "shape", None) From 68b442950beb60de29b336fdbc3fd0b0e6dad8f5 Mon Sep 17 00:00:00 2001 From: Lingqing Gan Date: Thu, 21 Sep 2023 18:30:23 -0400 Subject: [PATCH 123/210] bug: remove flake8 pin in owlbot (#213) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * bug: remove flake8 pin in owlbot * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- packages/db-dtypes/noxfile.py | 5 +---- packages/db-dtypes/owlbot.py | 8 -------- 2 files changed, 1 insertion(+), 12 deletions(-) diff --git a/packages/db-dtypes/noxfile.py b/packages/db-dtypes/noxfile.py index f338b6e43a65..86a34d9199e5 100644 --- a/packages/db-dtypes/noxfile.py +++ b/packages/db-dtypes/noxfile.py @@ -26,10 +26,7 @@ import nox -# Pin flake8 to 6.0.0 -# See https://github.com/googleapis/python-db-dtypes-pandas/issues/199 -FLAKE8_VERSION = "flake8==6.0.0" - +FLAKE8_VERSION = "flake8==6.1.0" BLACK_VERSION = "black==22.3.0" ISORT_VERSION = "isort==5.10.1" LINT_PATHS = ["docs", "db_dtypes", "tests", "noxfile.py", "setup.py"] diff --git a/packages/db-dtypes/owlbot.py b/packages/db-dtypes/owlbot.py index edac6bf1c526..4b89096dfad5 100644 --- a/packages/db-dtypes/owlbot.py +++ b/packages/db-dtypes/owlbot.py @@ -56,14 +56,6 @@ ["noxfile.py"], "--cov=google", "--cov=db_dtypes", ) -s.replace(["noxfile.py"], - """FLAKE8_VERSION = \"flake8==6.1.0\"""", - """# Pin flake8 to 6.0.0 -# See https://github.com/googleapis/python-db-dtypes-pandas/issues/199 -FLAKE8_VERSION = "flake8==6.0.0" -""" -) - # There are no system tests for this package. 
old_sessions = """ "unit", From 7869ae94b93b55d1838ee06aaaca8686cf6707c2 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 6 Oct 2023 21:51:55 -0400 Subject: [PATCH 124/210] chore: [autoapprove] bump cryptography from 41.0.3 to 41.0.4 (#214) Source-Link: https://github.com/googleapis/synthtool/commit/dede53ff326079b457cfb1aae5bbdc82cbb51dc3 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:fac304457974bb530cc5396abd4ab25d26a469cd3bc97cbfb18c8d4324c584eb Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 4 +- packages/db-dtypes/.gitignore | 1 + packages/db-dtypes/.kokoro/requirements.txt | 49 ++++++++++---------- 3 files changed, 28 insertions(+), 26 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index a3da1b0d4cd3..a9bdb1b7ac0f 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:3e3800bb100af5d7f9e810d48212b37812c1856d20ffeafb99ebe66461b61fc7 -# created: 2023-08-02T10:53:29.114535628Z + digest: sha256:fac304457974bb530cc5396abd4ab25d26a469cd3bc97cbfb18c8d4324c584eb +# created: 2023-10-02T21:31:03.517640371Z diff --git a/packages/db-dtypes/.gitignore b/packages/db-dtypes/.gitignore index b4243ced74e4..d083ea1ddc3e 100644 --- a/packages/db-dtypes/.gitignore +++ b/packages/db-dtypes/.gitignore @@ -50,6 +50,7 @@ docs.metadata # Virtual environment env/ +venv/ # Test logs coverage.xml diff --git a/packages/db-dtypes/.kokoro/requirements.txt b/packages/db-dtypes/.kokoro/requirements.txt index 029bd342de94..96d593c8c82a 100644 --- a/packages/db-dtypes/.kokoro/requirements.txt +++ b/packages/db-dtypes/.kokoro/requirements.txt @@ -113,30 +113,30 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==41.0.3 \ - --hash=sha256:0d09fb5356f975974dbcb595ad2d178305e5050656affb7890a1583f5e02a306 \ - --hash=sha256:23c2d778cf829f7d0ae180600b17e9fceea3c2ef8b31a99e3c694cbbf3a24b84 \ - --hash=sha256:3fb248989b6363906827284cd20cca63bb1a757e0a2864d4c1682a985e3dca47 \ - --hash=sha256:41d7aa7cdfded09b3d73a47f429c298e80796c8e825ddfadc84c8a7f12df212d \ - --hash=sha256:42cb413e01a5d36da9929baa9d70ca90d90b969269e5a12d39c1e0d475010116 \ - --hash=sha256:4c2f0d35703d61002a2bbdcf15548ebb701cfdd83cdc12471d2bae80878a4207 \ - --hash=sha256:4fd871184321100fb400d759ad0cddddf284c4b696568204d281c902fc7b0d81 \ - --hash=sha256:5259cb659aa43005eb55a0e4ff2c825ca111a0da1814202c64d28a985d33b087 \ - --hash=sha256:57a51b89f954f216a81c9d057bf1a24e2f36e764a1ca9a501a6964eb4a6800dd \ - --hash=sha256:652627a055cb52a84f8c448185922241dd5217443ca194d5739b44612c5e6507 \ - --hash=sha256:67e120e9a577c64fe1f611e53b30b3e69744e5910ff3b6e97e935aeb96005858 \ - 
--hash=sha256:6af1c6387c531cd364b72c28daa29232162010d952ceb7e5ca8e2827526aceae \ - --hash=sha256:6d192741113ef5e30d89dcb5b956ef4e1578f304708701b8b73d38e3e1461f34 \ - --hash=sha256:7efe8041897fe7a50863e51b77789b657a133c75c3b094e51b5e4b5cec7bf906 \ - --hash=sha256:84537453d57f55a50a5b6835622ee405816999a7113267739a1b4581f83535bd \ - --hash=sha256:8f09daa483aedea50d249ef98ed500569841d6498aa9c9f4b0531b9964658922 \ - --hash=sha256:95dd7f261bb76948b52a5330ba5202b91a26fbac13ad0e9fc8a3ac04752058c7 \ - --hash=sha256:a74fbcdb2a0d46fe00504f571a2a540532f4c188e6ccf26f1f178480117b33c4 \ - --hash=sha256:a983e441a00a9d57a4d7c91b3116a37ae602907a7618b882c8013b5762e80574 \ - --hash=sha256:ab8de0d091acbf778f74286f4989cf3d1528336af1b59f3e5d2ebca8b5fe49e1 \ - --hash=sha256:aeb57c421b34af8f9fe830e1955bf493a86a7996cc1338fe41b30047d16e962c \ - --hash=sha256:ce785cf81a7bdade534297ef9e490ddff800d956625020ab2ec2780a556c313e \ - --hash=sha256:d0d651aa754ef58d75cec6edfbd21259d93810b73f6ec246436a21b7841908de +cryptography==41.0.4 \ + --hash=sha256:004b6ccc95943f6a9ad3142cfabcc769d7ee38a3f60fb0dddbfb431f818c3a67 \ + --hash=sha256:047c4603aeb4bbd8db2756e38f5b8bd7e94318c047cfe4efeb5d715e08b49311 \ + --hash=sha256:0d9409894f495d465fe6fda92cb70e8323e9648af912d5b9141d616df40a87b8 \ + --hash=sha256:23a25c09dfd0d9f28da2352503b23e086f8e78096b9fd585d1d14eca01613e13 \ + --hash=sha256:2ed09183922d66c4ec5fdaa59b4d14e105c084dd0febd27452de8f6f74704143 \ + --hash=sha256:35c00f637cd0b9d5b6c6bd11b6c3359194a8eba9c46d4e875a3660e3b400005f \ + --hash=sha256:37480760ae08065437e6573d14be973112c9e6dcaf5f11d00147ee74f37a3829 \ + --hash=sha256:3b224890962a2d7b57cf5eeb16ccaafba6083f7b811829f00476309bce2fe0fd \ + --hash=sha256:5a0f09cefded00e648a127048119f77bc2b2ec61e736660b5789e638f43cc397 \ + --hash=sha256:5b72205a360f3b6176485a333256b9bcd48700fc755fef51c8e7e67c4b63e3ac \ + --hash=sha256:7e53db173370dea832190870e975a1e09c86a879b613948f09eb49324218c14d \ + 
--hash=sha256:7febc3094125fc126a7f6fb1f420d0da639f3f32cb15c8ff0dc3997c4549f51a \ + --hash=sha256:80907d3faa55dc5434a16579952ac6da800935cd98d14dbd62f6f042c7f5e839 \ + --hash=sha256:86defa8d248c3fa029da68ce61fe735432b047e32179883bdb1e79ed9bb8195e \ + --hash=sha256:8ac4f9ead4bbd0bc8ab2d318f97d85147167a488be0e08814a37eb2f439d5cf6 \ + --hash=sha256:93530900d14c37a46ce3d6c9e6fd35dbe5f5601bf6b3a5c325c7bffc030344d9 \ + --hash=sha256:9eeb77214afae972a00dee47382d2591abe77bdae166bda672fb1e24702a3860 \ + --hash=sha256:b5f4dfe950ff0479f1f00eda09c18798d4f49b98f4e2006d644b3301682ebdca \ + --hash=sha256:c3391bd8e6de35f6f1140e50aaeb3e2b3d6a9012536ca23ab0d9c35ec18c8a91 \ + --hash=sha256:c880eba5175f4307129784eca96f4e70b88e57aa3f680aeba3bab0e980b0f37d \ + --hash=sha256:cecfefa17042941f94ab54f769c8ce0fe14beff2694e9ac684176a2535bf9714 \ + --hash=sha256:e40211b4923ba5a6dc9769eab704bdb3fbb58d56c5b336d30996c24fcf12aadb \ + --hash=sha256:efc8ad4e6fc4f1752ebfb58aefece8b4e3c4cae940b0994d43649bdfce8d0d4f # via # gcp-releasetool # secretstorage @@ -382,6 +382,7 @@ protobuf==3.20.3 \ # gcp-docuploader # gcp-releasetool # google-api-core + # googleapis-common-protos pyasn1==0.4.8 \ --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \ --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba From bdcca1c60a8f4453e39751e156616e23a7c88366 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 9 Oct 2023 11:03:46 -0400 Subject: [PATCH 125/210] chore: [autoapprove] Update `black` and `isort` to latest versions (#216) Source-Link: https://github.com/googleapis/synthtool/commit/0c7b0333f44b2b7075447f43a121a12d15a7b76a Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:08e34975760f002746b1d8c86fdc90660be45945ee6d9db914d1508acdf9a547 Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 4 +-- packages/db-dtypes/.kokoro/requirements.txt | 6 ++-- 
packages/db-dtypes/.pre-commit-config.yaml | 2 +- packages/db-dtypes/noxfile.py | 36 +++++++++++--------- 4 files changed, 25 insertions(+), 23 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index a9bdb1b7ac0f..dd98abbdeebe 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:fac304457974bb530cc5396abd4ab25d26a469cd3bc97cbfb18c8d4324c584eb -# created: 2023-10-02T21:31:03.517640371Z + digest: sha256:08e34975760f002746b1d8c86fdc90660be45945ee6d9db914d1508acdf9a547 +# created: 2023-10-09T14:06:13.397766266Z diff --git a/packages/db-dtypes/.kokoro/requirements.txt b/packages/db-dtypes/.kokoro/requirements.txt index 96d593c8c82a..0332d3267e15 100644 --- a/packages/db-dtypes/.kokoro/requirements.txt +++ b/packages/db-dtypes/.kokoro/requirements.txt @@ -467,9 +467,9 @@ typing-extensions==4.4.0 \ --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e # via -r requirements.in -urllib3==1.26.12 \ - --hash=sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e \ - --hash=sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997 +urllib3==1.26.17 \ + --hash=sha256:24d6a242c28d29af46c3fae832c36db3bbebcc533dd1bb549172cd739c82df21 \ + --hash=sha256:94a757d178c9be92ef5539b8840d48dc9cf1b2709c9d6b588232a055c524458b # via # requests # twine diff --git a/packages/db-dtypes/.pre-commit-config.yaml b/packages/db-dtypes/.pre-commit-config.yaml index 19409cbd37a4..6a8e16950664 100644 --- a/packages/db-dtypes/.pre-commit-config.yaml +++ b/packages/db-dtypes/.pre-commit-config.yaml @@ -22,7 +22,7 @@ repos: - id: end-of-file-fixer - id: check-yaml - repo: https://github.com/psf/black - rev: 
22.3.0 + rev: 23.7.0 hooks: - id: black - repo: https://github.com/pycqa/flake8 diff --git a/packages/db-dtypes/noxfile.py b/packages/db-dtypes/noxfile.py index 86a34d9199e5..408b446b3c45 100644 --- a/packages/db-dtypes/noxfile.py +++ b/packages/db-dtypes/noxfile.py @@ -17,23 +17,25 @@ # Generated by synthtool. DO NOT EDIT! from __future__ import absolute_import + import os import pathlib import re import re import shutil +from typing import Dict, List import warnings import nox FLAKE8_VERSION = "flake8==6.1.0" -BLACK_VERSION = "black==22.3.0" -ISORT_VERSION = "isort==5.10.1" +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" LINT_PATHS = ["docs", "db_dtypes", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" -UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11"] UNIT_TEST_STANDARD_DEPENDENCIES = [ "mock", "asyncmock", @@ -41,23 +43,23 @@ "pytest-cov", "pytest-asyncio", ] -UNIT_TEST_EXTERNAL_DEPENDENCIES = [] -UNIT_TEST_LOCAL_DEPENDENCIES = [] -UNIT_TEST_DEPENDENCIES = [] -UNIT_TEST_EXTRAS = [] -UNIT_TEST_EXTRAS_BY_PYTHON = {} - -SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] -SYSTEM_TEST_STANDARD_DEPENDENCIES = [ +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8"] +SYSTEM_TEST_STANDARD_DEPENDENCIES: List[str] = [ "mock", "pytest", "google-cloud-testutils", ] -SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [] -SYSTEM_TEST_LOCAL_DEPENDENCIES = [] -SYSTEM_TEST_DEPENDENCIES = [] -SYSTEM_TEST_EXTRAS = [] -SYSTEM_TEST_EXTRAS_BY_PYTHON = {} +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] 
+SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -72,6 +74,7 @@ "lint_setup_py", "blacken", "docs", + "format", ] # Error if a python version is missing @@ -284,7 +287,6 @@ def unit_prerelease(session): def install_systemtest_dependencies(session, *constraints): - # Use pre-release gRPC for system tests. # Exclude version 1.52.0rc1 which has a known issue. # See https://github.com/grpc/grpc/issues/32163 From 56dcda8db733e3503d8a8be285752a25fa035593 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 26 Oct 2023 14:24:41 -0700 Subject: [PATCH 126/210] chore: rename rst files to avoid conflict with service names (#218) Source-Link: https://github.com/googleapis/synthtool/commit/d52e638b37b091054c869bfa6f5a9fedaba9e0dd Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:4f9b3b106ad0beafc2c8a415e3f62c1a0cc23cabea115dbe841b848f581cfe99 Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 4 ++-- packages/db-dtypes/.kokoro/requirements.txt | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index dd98abbdeebe..7f291dbd5f9b 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:08e34975760f002746b1d8c86fdc90660be45945ee6d9db914d1508acdf9a547 -# created: 2023-10-09T14:06:13.397766266Z + digest: sha256:4f9b3b106ad0beafc2c8a415e3f62c1a0cc23cabea115dbe841b848f581cfe99 +# created: 2023-10-18T20:26:37.410353675Z diff --git a/packages/db-dtypes/.kokoro/requirements.txt b/packages/db-dtypes/.kokoro/requirements.txt index 0332d3267e15..16170d0ca7b8 100644 --- a/packages/db-dtypes/.kokoro/requirements.txt +++ b/packages/db-dtypes/.kokoro/requirements.txt @@ -467,9 +467,9 @@ typing-extensions==4.4.0 \ --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e # via -r requirements.in -urllib3==1.26.17 \ - --hash=sha256:24d6a242c28d29af46c3fae832c36db3bbebcc533dd1bb549172cd739c82df21 \ - --hash=sha256:94a757d178c9be92ef5539b8840d48dc9cf1b2709c9d6b588232a055c524458b +urllib3==1.26.18 \ + --hash=sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07 \ + --hash=sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0 # via # requests # twine From 0495a4029b41cbc2a3e4c49c80ae28de17a7c9c9 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 2 Nov 2023 21:08:40 -0400 Subject: [PATCH 127/210] chore: update docfx minimum Python version (#219) Source-Link: https://github.com/googleapis/synthtool/commit/bc07fd415c39853b382bcf8315f8eeacdf334055 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:30470597773378105e239b59fce8eb27cc97375580d592699206d17d117143d0 Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 4 ++-- packages/db-dtypes/.github/workflows/docs.yml | 2 +- packages/db-dtypes/noxfile.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml 
b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 7f291dbd5f9b..ec696b558c35 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:4f9b3b106ad0beafc2c8a415e3f62c1a0cc23cabea115dbe841b848f581cfe99 -# created: 2023-10-18T20:26:37.410353675Z + digest: sha256:30470597773378105e239b59fce8eb27cc97375580d592699206d17d117143d0 +# created: 2023-11-03T00:57:07.335914631Z diff --git a/packages/db-dtypes/.github/workflows/docs.yml b/packages/db-dtypes/.github/workflows/docs.yml index e97d89e484c9..221806cedf58 100644 --- a/packages/db-dtypes/.github/workflows/docs.yml +++ b/packages/db-dtypes/.github/workflows/docs.yml @@ -28,7 +28,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v4 with: - python-version: "3.9" + python-version: "3.10" - name: Install nox run: | python -m pip install --upgrade setuptools pip wheel diff --git a/packages/db-dtypes/noxfile.py b/packages/db-dtypes/noxfile.py index 408b446b3c45..53475f241c88 100644 --- a/packages/db-dtypes/noxfile.py +++ b/packages/db-dtypes/noxfile.py @@ -398,7 +398,7 @@ def docs(session): ) -@nox.session(python="3.9") +@nox.session(python="3.10") def docfx(session): """Build the docfx yaml files for this library.""" From c0f2d7fa2d9484f321e4ccc65db4b1d3cba7cf0b Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 8 Nov 2023 12:03:05 -0800 Subject: [PATCH 128/210] chore: bump urllib3 from 1.26.12 to 1.26.18 (#220) Source-Link: https://github.com/googleapis/synthtool/commit/febacccc98d6d224aff9d0bd0373bb5a4cd5969c Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:caffe0a9277daeccc4d1de5c9b55ebba0901b57c2f713ec9c876b0d4ec064f61 Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 4 +- 
packages/db-dtypes/.kokoro/requirements.txt | 532 ++++++++++--------- 2 files changed, 277 insertions(+), 259 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index ec696b558c35..453b540c1e58 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:30470597773378105e239b59fce8eb27cc97375580d592699206d17d117143d0 -# created: 2023-11-03T00:57:07.335914631Z + digest: sha256:caffe0a9277daeccc4d1de5c9b55ebba0901b57c2f713ec9c876b0d4ec064f61 +# created: 2023-11-08T19:46:45.022803742Z diff --git a/packages/db-dtypes/.kokoro/requirements.txt b/packages/db-dtypes/.kokoro/requirements.txt index 16170d0ca7b8..8957e21104e2 100644 --- a/packages/db-dtypes/.kokoro/requirements.txt +++ b/packages/db-dtypes/.kokoro/requirements.txt @@ -4,91 +4,75 @@ # # pip-compile --allow-unsafe --generate-hashes requirements.in # -argcomplete==2.0.0 \ - --hash=sha256:6372ad78c89d662035101418ae253668445b391755cfe94ea52f1b9d22425b20 \ - --hash=sha256:cffa11ea77999bb0dd27bb25ff6dc142a6796142f68d45b1a26b11f58724561e +argcomplete==3.1.4 \ + --hash=sha256:72558ba729e4c468572609817226fb0a6e7e9a0a7d477b882be168c0b4a62b94 \ + --hash=sha256:fbe56f8cda08aa9a04b307d8482ea703e96a6a801611acb4be9bf3942017989f # via nox -attrs==22.1.0 \ - --hash=sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6 \ - --hash=sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c +attrs==23.1.0 \ + --hash=sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04 \ + --hash=sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015 # via gcp-releasetool -bleach==5.0.1 \ - --hash=sha256:085f7f33c15bd408dd9b17a4ad77c577db66d76203e5984b1bd59baeee948b2a \ - 
--hash=sha256:0d03255c47eb9bd2f26aa9bb7f2107732e7e8fe195ca2f64709fcf3b0a4a085c - # via readme-renderer -cachetools==5.2.0 \ - --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \ - --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db +cachetools==5.3.2 \ + --hash=sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2 \ + --hash=sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1 # via google-auth certifi==2023.7.22 \ --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \ --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9 # via requests -cffi==1.15.1 \ - --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ - --hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \ - --hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \ - --hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \ - --hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \ - --hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \ - --hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \ - --hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \ - --hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \ - --hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \ - --hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \ - --hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \ - --hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \ - --hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \ - --hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \ - 
--hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \ - --hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \ - --hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \ - --hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \ - --hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \ - --hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \ - --hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \ - --hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \ - --hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \ - --hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \ - --hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \ - --hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \ - --hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \ - --hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \ - --hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \ - --hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \ - --hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \ - --hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \ - --hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \ - --hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \ - --hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \ - --hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \ - --hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \ - --hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \ - 
--hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \ - --hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \ - --hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \ - --hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \ - --hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \ - --hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \ - --hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \ - --hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \ - --hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \ - --hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \ - --hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \ - --hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \ - --hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \ - --hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \ - --hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \ - --hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \ - --hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \ - --hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \ - --hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \ - --hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \ - --hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \ - --hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \ - --hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \ - --hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \ - 
--hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0 +cffi==1.16.0 \ + --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \ + --hash=sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a \ + --hash=sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417 \ + --hash=sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab \ + --hash=sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520 \ + --hash=sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36 \ + --hash=sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743 \ + --hash=sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8 \ + --hash=sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed \ + --hash=sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684 \ + --hash=sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56 \ + --hash=sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324 \ + --hash=sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d \ + --hash=sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235 \ + --hash=sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e \ + --hash=sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088 \ + --hash=sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000 \ + --hash=sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7 \ + --hash=sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e \ + --hash=sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673 \ + --hash=sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c \ + --hash=sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe \ + 
--hash=sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2 \ + --hash=sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098 \ + --hash=sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8 \ + --hash=sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a \ + --hash=sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0 \ + --hash=sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b \ + --hash=sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896 \ + --hash=sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e \ + --hash=sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9 \ + --hash=sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2 \ + --hash=sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b \ + --hash=sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6 \ + --hash=sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404 \ + --hash=sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f \ + --hash=sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0 \ + --hash=sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4 \ + --hash=sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc \ + --hash=sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936 \ + --hash=sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba \ + --hash=sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872 \ + --hash=sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb \ + --hash=sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614 \ + --hash=sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1 \ + --hash=sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d \ + 
--hash=sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969 \ + --hash=sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b \ + --hash=sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4 \ + --hash=sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627 \ + --hash=sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956 \ + --hash=sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357 # via cryptography charset-normalizer==2.1.1 \ --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ @@ -109,78 +93,74 @@ colorlog==6.7.0 \ # via # gcp-docuploader # nox -commonmark==0.9.1 \ - --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ - --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 - # via rich -cryptography==41.0.4 \ - --hash=sha256:004b6ccc95943f6a9ad3142cfabcc769d7ee38a3f60fb0dddbfb431f818c3a67 \ - --hash=sha256:047c4603aeb4bbd8db2756e38f5b8bd7e94318c047cfe4efeb5d715e08b49311 \ - --hash=sha256:0d9409894f495d465fe6fda92cb70e8323e9648af912d5b9141d616df40a87b8 \ - --hash=sha256:23a25c09dfd0d9f28da2352503b23e086f8e78096b9fd585d1d14eca01613e13 \ - --hash=sha256:2ed09183922d66c4ec5fdaa59b4d14e105c084dd0febd27452de8f6f74704143 \ - --hash=sha256:35c00f637cd0b9d5b6c6bd11b6c3359194a8eba9c46d4e875a3660e3b400005f \ - --hash=sha256:37480760ae08065437e6573d14be973112c9e6dcaf5f11d00147ee74f37a3829 \ - --hash=sha256:3b224890962a2d7b57cf5eeb16ccaafba6083f7b811829f00476309bce2fe0fd \ - --hash=sha256:5a0f09cefded00e648a127048119f77bc2b2ec61e736660b5789e638f43cc397 \ - --hash=sha256:5b72205a360f3b6176485a333256b9bcd48700fc755fef51c8e7e67c4b63e3ac \ - --hash=sha256:7e53db173370dea832190870e975a1e09c86a879b613948f09eb49324218c14d \ - --hash=sha256:7febc3094125fc126a7f6fb1f420d0da639f3f32cb15c8ff0dc3997c4549f51a \ - --hash=sha256:80907d3faa55dc5434a16579952ac6da800935cd98d14dbd62f6f042c7f5e839 \ - 
--hash=sha256:86defa8d248c3fa029da68ce61fe735432b047e32179883bdb1e79ed9bb8195e \ - --hash=sha256:8ac4f9ead4bbd0bc8ab2d318f97d85147167a488be0e08814a37eb2f439d5cf6 \ - --hash=sha256:93530900d14c37a46ce3d6c9e6fd35dbe5f5601bf6b3a5c325c7bffc030344d9 \ - --hash=sha256:9eeb77214afae972a00dee47382d2591abe77bdae166bda672fb1e24702a3860 \ - --hash=sha256:b5f4dfe950ff0479f1f00eda09c18798d4f49b98f4e2006d644b3301682ebdca \ - --hash=sha256:c3391bd8e6de35f6f1140e50aaeb3e2b3d6a9012536ca23ab0d9c35ec18c8a91 \ - --hash=sha256:c880eba5175f4307129784eca96f4e70b88e57aa3f680aeba3bab0e980b0f37d \ - --hash=sha256:cecfefa17042941f94ab54f769c8ce0fe14beff2694e9ac684176a2535bf9714 \ - --hash=sha256:e40211b4923ba5a6dc9769eab704bdb3fbb58d56c5b336d30996c24fcf12aadb \ - --hash=sha256:efc8ad4e6fc4f1752ebfb58aefece8b4e3c4cae940b0994d43649bdfce8d0d4f +cryptography==41.0.5 \ + --hash=sha256:0c327cac00f082013c7c9fb6c46b7cc9fa3c288ca702c74773968173bda421bf \ + --hash=sha256:0d2a6a598847c46e3e321a7aef8af1436f11c27f1254933746304ff014664d84 \ + --hash=sha256:227ec057cd32a41c6651701abc0328135e472ed450f47c2766f23267b792a88e \ + --hash=sha256:22892cc830d8b2c89ea60148227631bb96a7da0c1b722f2aac8824b1b7c0b6b8 \ + --hash=sha256:392cb88b597247177172e02da6b7a63deeff1937fa6fec3bbf902ebd75d97ec7 \ + --hash=sha256:3be3ca726e1572517d2bef99a818378bbcf7d7799d5372a46c79c29eb8d166c1 \ + --hash=sha256:573eb7128cbca75f9157dcde974781209463ce56b5804983e11a1c462f0f4e88 \ + --hash=sha256:580afc7b7216deeb87a098ef0674d6ee34ab55993140838b14c9b83312b37b86 \ + --hash=sha256:5a70187954ba7292c7876734183e810b728b4f3965fbe571421cb2434d279179 \ + --hash=sha256:73801ac9736741f220e20435f84ecec75ed70eda90f781a148f1bad546963d81 \ + --hash=sha256:7d208c21e47940369accfc9e85f0de7693d9a5d843c2509b3846b2db170dfd20 \ + --hash=sha256:8254962e6ba1f4d2090c44daf50a547cd5f0bf446dc658a8e5f8156cae0d8548 \ + --hash=sha256:88417bff20162f635f24f849ab182b092697922088b477a7abd6664ddd82291d \ + 
--hash=sha256:a48e74dad1fb349f3dc1d449ed88e0017d792997a7ad2ec9587ed17405667e6d \ + --hash=sha256:b948e09fe5fb18517d99994184854ebd50b57248736fd4c720ad540560174ec5 \ + --hash=sha256:c707f7afd813478e2019ae32a7c49cd932dd60ab2d2a93e796f68236b7e1fbf1 \ + --hash=sha256:d38e6031e113b7421db1de0c1b1f7739564a88f1684c6b89234fbf6c11b75147 \ + --hash=sha256:d3977f0e276f6f5bf245c403156673db103283266601405376f075c849a0b936 \ + --hash=sha256:da6a0ff8f1016ccc7477e6339e1d50ce5f59b88905585f77193ebd5068f1e797 \ + --hash=sha256:e270c04f4d9b5671ebcc792b3ba5d4488bf7c42c3c241a3748e2599776f29696 \ + --hash=sha256:e886098619d3815e0ad5790c973afeee2c0e6e04b4da90b88e6bd06e2a0b1b72 \ + --hash=sha256:ec3b055ff8f1dce8e6ef28f626e0972981475173d7973d63f271b29c8a2897da \ + --hash=sha256:fba1e91467c65fe64a82c689dc6cf58151158993b13eb7a7f3f4b7f395636723 # via # gcp-releasetool # secretstorage -distlib==0.3.6 \ - --hash=sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46 \ - --hash=sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e +distlib==0.3.7 \ + --hash=sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057 \ + --hash=sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8 # via virtualenv -docutils==0.19 \ - --hash=sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6 \ - --hash=sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc +docutils==0.20.1 \ + --hash=sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6 \ + --hash=sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b # via readme-renderer -filelock==3.8.0 \ - --hash=sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc \ - --hash=sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4 +filelock==3.13.1 \ + --hash=sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e \ + 
--hash=sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c # via virtualenv -gcp-docuploader==0.6.4 \ - --hash=sha256:01486419e24633af78fd0167db74a2763974765ee8078ca6eb6964d0ebd388af \ - --hash=sha256:70861190c123d907b3b067da896265ead2eeb9263969d6955c9e0bb091b5ccbf +gcp-docuploader==0.6.5 \ + --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ + --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea # via -r requirements.in -gcp-releasetool==1.10.5 \ - --hash=sha256:174b7b102d704b254f2a26a3eda2c684fd3543320ec239baf771542a2e58e109 \ - --hash=sha256:e29d29927fe2ca493105a82958c6873bb2b90d503acac56be2c229e74de0eec9 +gcp-releasetool==1.16.0 \ + --hash=sha256:27bf19d2e87aaa884096ff941aa3c592c482be3d6a2bfe6f06afafa6af2353e3 \ + --hash=sha256:a316b197a543fd036209d0caba7a8eb4d236d8e65381c80cbc6d7efaa7606d63 # via -r requirements.in -google-api-core==2.10.2 \ - --hash=sha256:10c06f7739fe57781f87523375e8e1a3a4674bf6392cd6131a3222182b971320 \ - --hash=sha256:34f24bd1d5f72a8c4519773d99ca6bf080a6c4e041b4e9f024fe230191dda62e +google-api-core==2.12.0 \ + --hash=sha256:c22e01b1e3c4dcd90998494879612c38d0a3411d1f7b679eb89e2abe3ce1f553 \ + --hash=sha256:ec6054f7d64ad13b41e43d96f735acbd763b0f3b695dabaa2d579673f6a6e160 # via # google-cloud-core # google-cloud-storage -google-auth==2.14.1 \ - --hash=sha256:ccaa901f31ad5cbb562615eb8b664b3dd0bf5404a67618e642307f00613eda4d \ - --hash=sha256:f5d8701633bebc12e0deea4df8abd8aff31c28b355360597f7f2ee60f2e4d016 +google-auth==2.23.4 \ + --hash=sha256:79905d6b1652187def79d491d6e23d0cbb3a21d3c7ba0dbaa9c8a01906b13ff3 \ + --hash=sha256:d4bbc92fe4b8bfd2f3e8d88e5ba7085935da208ee38a134fc280e7ce682a05f2 # via # gcp-releasetool # google-api-core # google-cloud-core # google-cloud-storage -google-cloud-core==2.3.2 \ - --hash=sha256:8417acf6466be2fa85123441696c4badda48db314c607cf1e5d543fa8bdc22fe \ - --hash=sha256:b9529ee7047fd8d4bf4a2182de619154240df17fbe60ead399078c1ae152af9a 
+google-cloud-core==2.3.3 \ + --hash=sha256:37b80273c8d7eee1ae816b3a20ae43585ea50506cb0e60f3cf5be5f87f1373cb \ + --hash=sha256:fbd11cad3e98a7e5b0343dc07cb1039a5ffd7a5bb96e1f1e27cee4bda4a90863 # via google-cloud-storage -google-cloud-storage==2.6.0 \ - --hash=sha256:104ca28ae61243b637f2f01455cc8a05e8f15a2a18ced96cb587241cdd3820f5 \ - --hash=sha256:4ad0415ff61abdd8bb2ae81c1f8f7ec7d91a1011613f2db87c614c550f97bfe9 +google-cloud-storage==2.13.0 \ + --hash=sha256:ab0bf2e1780a1b74cf17fccb13788070b729f50c252f0c94ada2aae0ca95437d \ + --hash=sha256:f62dc4c7b6cd4360d072e3deb28035fbdad491ac3d9b0b1815a12daea10f37c7 # via gcp-docuploader google-crc32c==1.5.0 \ --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ @@ -251,29 +231,31 @@ google-crc32c==1.5.0 \ --hash=sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183 \ --hash=sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556 \ --hash=sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4 - # via google-resumable-media -google-resumable-media==2.4.0 \ - --hash=sha256:2aa004c16d295c8f6c33b2b4788ba59d366677c0a25ae7382436cb30f776deaa \ - --hash=sha256:8d5518502f92b9ecc84ac46779bd4f09694ecb3ba38a3e7ca737a86d15cbca1f + # via + # google-cloud-storage + # google-resumable-media +google-resumable-media==2.6.0 \ + --hash=sha256:972852f6c65f933e15a4a210c2b96930763b47197cdf4aa5f5bea435efb626e7 \ + --hash=sha256:fc03d344381970f79eebb632a3c18bb1828593a2dc5572b5f90115ef7d11e81b # via google-cloud-storage -googleapis-common-protos==1.57.0 \ - --hash=sha256:27a849d6205838fb6cc3c1c21cb9800707a661bb21c6ce7fb13e99eb1f8a0c46 \ - --hash=sha256:a9f4a1d7f6d9809657b7f1316a1aa527f6664891531bcfcc13b6696e685f443c +googleapis-common-protos==1.61.0 \ + --hash=sha256:22f1915393bb3245343f6efe87f6fe868532efc12aa26b391b15132e1279f1c0 \ + --hash=sha256:8a64866a97f6304a7179873a465d6eee97b7a24ec6cfd78e0f575e96b821240b # via google-api-core idna==3.4 \ 
--hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 # via requests -importlib-metadata==5.0.0 \ - --hash=sha256:da31db32b304314d044d3c12c79bd59e307889b287ad12ff387b3500835fc2ab \ - --hash=sha256:ddb0e35065e8938f867ed4928d0ae5bf2a53b7773871bfe6bcc7e4fcdc7dea43 +importlib-metadata==6.8.0 \ + --hash=sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb \ + --hash=sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743 # via # -r requirements.in # keyring # twine -jaraco-classes==3.2.3 \ - --hash=sha256:2353de3288bc6b82120752201c6b1c1a14b058267fa424ed5ce5984e3b922158 \ - --hash=sha256:89559fa5c1d3c34eff6f631ad80bb21f378dbcbb35dd161fd2c6b93f5be2f98a +jaraco-classes==3.3.0 \ + --hash=sha256:10afa92b6743f25c0cf5f37c6bb6e18e2c5bb84a16527ccfc0040ea377e7aaeb \ + --hash=sha256:c063dd08e89217cee02c8d5e5ec560f2c8ce6cdc2fcdc2e68f7b2e5547ed3621 # via keyring jeepney==0.8.0 \ --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ @@ -285,75 +267,121 @@ jinja2==3.1.2 \ --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 # via gcp-releasetool -keyring==23.11.0 \ - --hash=sha256:3dd30011d555f1345dec2c262f0153f2f0ca6bca041fb1dc4588349bb4c0ac1e \ - --hash=sha256:ad192263e2cdd5f12875dedc2da13534359a7e760e77f8d04b50968a821c2361 +keyring==24.2.0 \ + --hash=sha256:4901caaf597bfd3bbd78c9a0c7c4c29fcd8310dab2cffefe749e916b6527acd6 \ + --hash=sha256:ca0746a19ec421219f4d713f848fa297a661a8a8c1504867e55bfb5e09091509 # via # gcp-releasetool # twine -markupsafe==2.1.1 \ - --hash=sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003 \ - --hash=sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88 \ - --hash=sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5 \ - 
--hash=sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7 \ - --hash=sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a \ - --hash=sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603 \ - --hash=sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1 \ - --hash=sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135 \ - --hash=sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247 \ - --hash=sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6 \ - --hash=sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601 \ - --hash=sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77 \ - --hash=sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02 \ - --hash=sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e \ - --hash=sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63 \ - --hash=sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f \ - --hash=sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980 \ - --hash=sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b \ - --hash=sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812 \ - --hash=sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff \ - --hash=sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96 \ - --hash=sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1 \ - --hash=sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925 \ - --hash=sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a \ - --hash=sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6 \ - --hash=sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e \ - --hash=sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f \ - 
--hash=sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4 \ - --hash=sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f \ - --hash=sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3 \ - --hash=sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c \ - --hash=sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a \ - --hash=sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417 \ - --hash=sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a \ - --hash=sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a \ - --hash=sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37 \ - --hash=sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452 \ - --hash=sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933 \ - --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \ - --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7 +markdown-it-py==3.0.0 \ + --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \ + --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb + # via rich +markupsafe==2.1.3 \ + --hash=sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e \ + --hash=sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e \ + --hash=sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431 \ + --hash=sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686 \ + --hash=sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c \ + --hash=sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559 \ + --hash=sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc \ + --hash=sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb \ + 
--hash=sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939 \ + --hash=sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c \ + --hash=sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0 \ + --hash=sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4 \ + --hash=sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9 \ + --hash=sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575 \ + --hash=sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba \ + --hash=sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d \ + --hash=sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd \ + --hash=sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3 \ + --hash=sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00 \ + --hash=sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155 \ + --hash=sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac \ + --hash=sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52 \ + --hash=sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f \ + --hash=sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8 \ + --hash=sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b \ + --hash=sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007 \ + --hash=sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24 \ + --hash=sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea \ + --hash=sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198 \ + --hash=sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0 \ + --hash=sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee \ + --hash=sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be \ + 
--hash=sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2 \ + --hash=sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1 \ + --hash=sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707 \ + --hash=sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6 \ + --hash=sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c \ + --hash=sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58 \ + --hash=sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823 \ + --hash=sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779 \ + --hash=sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636 \ + --hash=sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c \ + --hash=sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad \ + --hash=sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee \ + --hash=sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc \ + --hash=sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2 \ + --hash=sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48 \ + --hash=sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7 \ + --hash=sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e \ + --hash=sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b \ + --hash=sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa \ + --hash=sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5 \ + --hash=sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e \ + --hash=sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb \ + --hash=sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9 \ + --hash=sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57 \ + 
--hash=sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc \ + --hash=sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc \ + --hash=sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2 \ + --hash=sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11 # via jinja2 -more-itertools==9.0.0 \ - --hash=sha256:250e83d7e81d0c87ca6bd942e6aeab8cc9daa6096d12c5308f3f92fa5e5c1f41 \ - --hash=sha256:5a6257e40878ef0520b1803990e3e22303a41b5714006c32a3fd8304b26ea1ab +mdurl==0.1.2 \ + --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ + --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba + # via markdown-it-py +more-itertools==10.1.0 \ + --hash=sha256:626c369fa0eb37bac0291bce8259b332fd59ac792fa5497b59837309cd5b114a \ + --hash=sha256:64e0735fcfdc6f3464ea133afe8ea4483b1c5fe3a3d69852e6503b43a0b222e6 # via jaraco-classes -nox==2022.11.21 \ - --hash=sha256:0e41a990e290e274cb205a976c4c97ee3c5234441a8132c8c3fd9ea3c22149eb \ - --hash=sha256:e21c31de0711d1274ca585a2c5fde36b1aa962005ba8e9322bf5eeed16dcd684 +nh3==0.2.14 \ + --hash=sha256:116c9515937f94f0057ef50ebcbcc10600860065953ba56f14473ff706371873 \ + --hash=sha256:18415df36db9b001f71a42a3a5395db79cf23d556996090d293764436e98e8ad \ + --hash=sha256:203cac86e313cf6486704d0ec620a992c8bc164c86d3a4fd3d761dd552d839b5 \ + --hash=sha256:2b0be5c792bd43d0abef8ca39dd8acb3c0611052ce466d0401d51ea0d9aa7525 \ + --hash=sha256:377aaf6a9e7c63962f367158d808c6a1344e2b4f83d071c43fbd631b75c4f0b2 \ + --hash=sha256:525846c56c2bcd376f5eaee76063ebf33cf1e620c1498b2a40107f60cfc6054e \ + --hash=sha256:5529a3bf99402c34056576d80ae5547123f1078da76aa99e8ed79e44fa67282d \ + --hash=sha256:7771d43222b639a4cd9e341f870cee336b9d886de1ad9bec8dddab22fe1de450 \ + --hash=sha256:88c753efbcdfc2644a5012938c6b9753f1c64a5723a67f0301ca43e7b85dcf0e \ + --hash=sha256:93a943cfd3e33bd03f77b97baa11990148687877b74193bf777956b67054dcc6 \ + 
--hash=sha256:9be2f68fb9a40d8440cbf34cbf40758aa7f6093160bfc7fb018cce8e424f0c3a \ + --hash=sha256:a0c509894fd4dccdff557068e5074999ae3b75f4c5a2d6fb5415e782e25679c4 \ + --hash=sha256:ac8056e937f264995a82bf0053ca898a1cb1c9efc7cd68fa07fe0060734df7e4 \ + --hash=sha256:aed56a86daa43966dd790ba86d4b810b219f75b4bb737461b6886ce2bde38fd6 \ + --hash=sha256:e8986f1dd3221d1e741fda0a12eaa4a273f1d80a35e31a1ffe579e7c621d069e \ + --hash=sha256:f99212a81c62b5f22f9e7c3e347aa00491114a5647e1f13bbebd79c3e5f08d75 + # via readme-renderer +nox==2023.4.22 \ + --hash=sha256:0b1adc619c58ab4fa57d6ab2e7823fe47a32e70202f287d78474adcc7bda1891 \ + --hash=sha256:46c0560b0dc609d7d967dc99e22cb463d3c4caf54a5fda735d6c11b5177e3a9f # via -r requirements.in -packaging==21.3 \ - --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \ - --hash=sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522 +packaging==23.2 \ + --hash=sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5 \ + --hash=sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7 # via # gcp-releasetool # nox -pkginfo==1.8.3 \ - --hash=sha256:848865108ec99d4901b2f7e84058b6e7660aae8ae10164e015a6dcf5b242a594 \ - --hash=sha256:a84da4318dd86f870a9447a8c98340aa06216bfc6f2b7bdc4b8766984ae1867c +pkginfo==1.9.6 \ + --hash=sha256:4b7a555a6d5a22169fcc9cf7bfd78d296b0361adad412a346c1226849af5e546 \ + --hash=sha256:8fd5896e8718a4372f0ea9cc9d96f6417c9b986e23a4d116dda26b62cc29d046 # via twine -platformdirs==2.5.4 \ - --hash=sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7 \ - --hash=sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10 +platformdirs==3.11.0 \ + --hash=sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3 \ + --hash=sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e # via virtualenv protobuf==3.20.3 \ --hash=sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7 \ @@ -383,34 
+411,30 @@ protobuf==3.20.3 \ # gcp-releasetool # google-api-core # googleapis-common-protos -pyasn1==0.4.8 \ - --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \ - --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba +pyasn1==0.5.0 \ + --hash=sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57 \ + --hash=sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde # via # pyasn1-modules # rsa -pyasn1-modules==0.2.8 \ - --hash=sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e \ - --hash=sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74 +pyasn1-modules==0.3.0 \ + --hash=sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c \ + --hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d # via google-auth pycparser==2.21 \ --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 # via cffi -pygments==2.15.0 \ - --hash=sha256:77a3299119af881904cd5ecd1ac6a66214b6e9bed1f2db16993b54adede64094 \ - --hash=sha256:f7e36cffc4c517fbc252861b9a6e4644ca0e5abadf9a113c72d1358ad09b9500 +pygments==2.16.1 \ + --hash=sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692 \ + --hash=sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29 # via # readme-renderer # rich -pyjwt==2.6.0 \ - --hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \ - --hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14 +pyjwt==2.8.0 \ + --hash=sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de \ + --hash=sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320 # via gcp-releasetool -pyparsing==3.0.9 \ - --hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \ - 
--hash=sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc - # via packaging pyperclip==1.8.2 \ --hash=sha256:105254a8b04934f0bc84e9c24eb360a591aaf6535c9def5f29d92af107a9bf57 # via gcp-releasetool @@ -418,9 +442,9 @@ python-dateutil==2.8.2 \ --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 # via gcp-releasetool -readme-renderer==37.3 \ - --hash=sha256:cd653186dfc73055656f090f227f5cb22a046d7f71a841dfa305f55c9a513273 \ - --hash=sha256:f67a16caedfa71eef48a31b39708637a6f4664c4394801a7b0d6432d13907343 +readme-renderer==42.0 \ + --hash=sha256:13d039515c1f24de668e2c93f2e877b9dbe6c6c32328b90a40a49d8b2b85f36d \ + --hash=sha256:2d55489f83be4992fe4454939d1a051c33edbab778e82761d060c9fc6b308cd1 # via twine requests==2.31.0 \ --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \ @@ -431,17 +455,17 @@ requests==2.31.0 \ # google-cloud-storage # requests-toolbelt # twine -requests-toolbelt==0.10.1 \ - --hash=sha256:18565aa58116d9951ac39baa288d3adb5b3ff975c4f25eee78555d89e8f247f7 \ - --hash=sha256:62e09f7ff5ccbda92772a29f394a49c3ad6cb181d568b1337626b2abb628a63d +requests-toolbelt==1.0.0 \ + --hash=sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6 \ + --hash=sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06 # via twine rfc3986==2.0.0 \ --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c # via twine -rich==12.6.0 \ - --hash=sha256:a4eb26484f2c82589bd9a17c73d32a010b1e29d89f1604cd9bf3a2097b81bb5e \ - --hash=sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0 +rich==13.6.0 \ + --hash=sha256:2b38e2fe9ca72c9a00170a1a2d20c63c790d0e10ef1fe35eba76e1e7b1d7d245 \ + --hash=sha256:5c14d22737e6d5084ef4771b62d5d4363165b403455a30a1c8ca39dc7b644bef # via twine rsa==4.9 \ 
--hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ @@ -455,43 +479,37 @@ six==1.16.0 \ --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 # via - # bleach # gcp-docuploader - # google-auth # python-dateutil -twine==4.0.1 \ - --hash=sha256:42026c18e394eac3e06693ee52010baa5313e4811d5a11050e7d48436cf41b9e \ - --hash=sha256:96b1cf12f7ae611a4a40b6ae8e9570215daff0611828f5fe1f37a16255ab24a0 +twine==4.0.2 \ + --hash=sha256:929bc3c280033347a00f847236564d1c52a3e61b1ac2516c97c48f3ceab756d8 \ + --hash=sha256:9e102ef5fdd5a20661eb88fad46338806c3bd32cf1db729603fe3697b1bc83c8 # via -r requirements.in -typing-extensions==4.4.0 \ - --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ - --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e +typing-extensions==4.8.0 \ + --hash=sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0 \ + --hash=sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef # via -r requirements.in -urllib3==1.26.18 \ - --hash=sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07 \ - --hash=sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0 +urllib3==2.0.7 \ + --hash=sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84 \ + --hash=sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e # via # requests # twine -virtualenv==20.16.7 \ - --hash=sha256:8691e3ff9387f743e00f6bb20f70121f5e4f596cae754531f2b3b3a1b1ac696e \ - --hash=sha256:efd66b00386fdb7dbe4822d172303f40cd05e50e01740b19ea42425cbe653e29 +virtualenv==20.24.6 \ + --hash=sha256:02ece4f56fbf939dbbc33c0715159951d6bf14aaf5457b092e4548e1382455af \ + --hash=sha256:520d056652454c5098a00c0f073611ccbea4c79089331f60bf9d7ba247bb7381 # via nox -webencodings==0.5.1 \ - 
--hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \ - --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923 - # via bleach -wheel==0.38.4 \ - --hash=sha256:965f5259b566725405b05e7cf774052044b1ed30119b5d586b2703aafe8719ac \ - --hash=sha256:b60533f3f5d530e971d6737ca6d58681ee434818fab630c83a734bb10c083ce8 +wheel==0.41.3 \ + --hash=sha256:488609bc63a29322326e05560731bf7bfea8e48ad646e1f5e40d366607de0942 \ + --hash=sha256:4d4987ce51a49370ea65c0bfd2234e8ce80a12780820d9dc462597a6e60d0841 # via -r requirements.in -zipp==3.10.0 \ - --hash=sha256:4fcb6f278987a6605757302a6e40e896257570d11c51628968ccb2a47e80c6c1 \ - --hash=sha256:7a7262fd930bd3e36c50b9a64897aec3fafff3dfdeec9623ae22b40e93f99bb8 +zipp==3.17.0 \ + --hash=sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31 \ + --hash=sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0 # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==65.5.1 \ - --hash=sha256:d0b9a8433464d5800cbe05094acf5c6d52a91bfac9b52bcfc4d41382be5d5d31 \ - --hash=sha256:e197a19aa8ec9722928f2206f8de752def0e4c9fc6953527360d1c36d94ddb2f +setuptools==68.2.2 \ + --hash=sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87 \ + --hash=sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a # via -r requirements.in From 7e70efa88150f7776733aa7e94db3c11d4e8276c Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 8 Nov 2023 21:10:26 +0100 Subject: [PATCH 129/210] chore(deps): update all dependencies (#209) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- packages/db-dtypes/.github/workflows/compliance.yml | 4 ++-- 
packages/db-dtypes/.github/workflows/unittest-prerelease.yml | 2 +- packages/db-dtypes/samples/snippets/requirements-test.txt | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/db-dtypes/.github/workflows/compliance.yml b/packages/db-dtypes/.github/workflows/compliance.yml index 0ae11019d68a..d1740fb8b045 100644 --- a/packages/db-dtypes/.github/workflows/compliance.yml +++ b/packages/db-dtypes/.github/workflows/compliance.yml @@ -11,7 +11,7 @@ jobs: python: ['3.11'] steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Python uses: actions/setup-python@v4 with: @@ -32,7 +32,7 @@ jobs: python: ['3.11'] steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Python uses: actions/setup-python@v4 with: diff --git a/packages/db-dtypes/.github/workflows/unittest-prerelease.yml b/packages/db-dtypes/.github/workflows/unittest-prerelease.yml index 18fc3f09e658..06d2e2be3a1f 100644 --- a/packages/db-dtypes/.github/workflows/unittest-prerelease.yml +++ b/packages/db-dtypes/.github/workflows/unittest-prerelease.yml @@ -11,7 +11,7 @@ jobs: python: ['3.11'] steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Python uses: actions/setup-python@v4 with: diff --git a/packages/db-dtypes/samples/snippets/requirements-test.txt b/packages/db-dtypes/samples/snippets/requirements-test.txt index 70613be0cfe4..2a929edcc789 100644 --- a/packages/db-dtypes/samples/snippets/requirements-test.txt +++ b/packages/db-dtypes/samples/snippets/requirements-test.txt @@ -1 +1 @@ -pytest==7.4.0 +pytest==7.4.2 From 4d50f8bb0259c0fa0aeaea3bc592a07cb8c3332d Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 8 Nov 2023 21:19:36 +0100 Subject: [PATCH 130/210] chore(deps): update all dependencies (#221) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot 
post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- packages/db-dtypes/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/db-dtypes/samples/snippets/requirements-test.txt b/packages/db-dtypes/samples/snippets/requirements-test.txt index 2a929edcc789..f9708e4b7cf1 100644 --- a/packages/db-dtypes/samples/snippets/requirements-test.txt +++ b/packages/db-dtypes/samples/snippets/requirements-test.txt @@ -1 +1 @@ -pytest==7.4.2 +pytest==7.4.3 From 4f1ee7bf7c1a477fa72ffc68bf896dca005eff24 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 30 Nov 2023 16:09:01 -0800 Subject: [PATCH 131/210] feat: Add support for Python 3.12 (#223) * chore(python): Add Python 3.12 Source-Link: https://github.com/googleapis/synthtool/commit/af16e6d4672cc7b400f144de2fc3068b54ff47d2 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:bacc3af03bff793a03add584537b36b5644342931ad989e3ba1171d3bd5399f5 * add constraints file for python 3.12 * remove constraints file for python 3.6 * Add trove classifiers for python 3.11 / 3.12 * update prerelease and compliance workflows to use python 3.12 --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 4 +- .../.github/workflows/compliance.yml | 4 +- .../.github/workflows/unittest-prerelease.yml | 2 +- .../db-dtypes/.github/workflows/unittest.yml | 2 +- .../.kokoro/samples/python3.12/common.cfg | 40 +++++++++++++++++++ .../.kokoro/samples/python3.12/continuous.cfg | 6 +++ .../samples/python3.12/periodic-head.cfg | 11 +++++ .../.kokoro/samples/python3.12/periodic.cfg | 6 +++ .../.kokoro/samples/python3.12/presubmit.cfg | 6 +++ packages/db-dtypes/CONTRIBUTING.rst | 6 ++- packages/db-dtypes/noxfile.py | 2 +- .../db-dtypes/samples/snippets/noxfile.py | 
2 +- packages/db-dtypes/setup.py | 2 + .../db-dtypes/testing/constraints-3.12.txt | 0 .../db-dtypes/testing/constraints-3.6.txt | 11 ----- 15 files changed, 83 insertions(+), 21 deletions(-) create mode 100644 packages/db-dtypes/.kokoro/samples/python3.12/common.cfg create mode 100644 packages/db-dtypes/.kokoro/samples/python3.12/continuous.cfg create mode 100644 packages/db-dtypes/.kokoro/samples/python3.12/periodic-head.cfg create mode 100644 packages/db-dtypes/.kokoro/samples/python3.12/periodic.cfg create mode 100644 packages/db-dtypes/.kokoro/samples/python3.12/presubmit.cfg create mode 100644 packages/db-dtypes/testing/constraints-3.12.txt delete mode 100644 packages/db-dtypes/testing/constraints-3.6.txt diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 453b540c1e58..eb4d9f794dc1 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:caffe0a9277daeccc4d1de5c9b55ebba0901b57c2f713ec9c876b0d4ec064f61 -# created: 2023-11-08T19:46:45.022803742Z + digest: sha256:bacc3af03bff793a03add584537b36b5644342931ad989e3ba1171d3bd5399f5 +# created: 2023-11-23T18:17:28.105124211Z diff --git a/packages/db-dtypes/.github/workflows/compliance.yml b/packages/db-dtypes/.github/workflows/compliance.yml index d1740fb8b045..d876cf3ed925 100644 --- a/packages/db-dtypes/.github/workflows/compliance.yml +++ b/packages/db-dtypes/.github/workflows/compliance.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ['3.11'] + python: ['3.12'] steps: - name: Checkout uses: actions/checkout@v4 @@ -29,7 +29,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ['3.11'] + python: ['3.12'] steps: - name: Checkout uses: actions/checkout@v4 diff --git a/packages/db-dtypes/.github/workflows/unittest-prerelease.yml b/packages/db-dtypes/.github/workflows/unittest-prerelease.yml index 06d2e2be3a1f..86d241e732d6 100644 --- a/packages/db-dtypes/.github/workflows/unittest-prerelease.yml +++ b/packages/db-dtypes/.github/workflows/unittest-prerelease.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ['3.11'] + python: ['3.12'] steps: - name: Checkout uses: actions/checkout@v4 diff --git a/packages/db-dtypes/.github/workflows/unittest.yml b/packages/db-dtypes/.github/workflows/unittest.yml index 8057a7691b12..a32027b49bc2 100644 --- a/packages/db-dtypes/.github/workflows/unittest.yml +++ b/packages/db-dtypes/.github/workflows/unittest.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ['3.7', '3.8', '3.9', '3.10', '3.11'] + python: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12'] steps: - name: Checkout uses: actions/checkout@v3 diff --git a/packages/db-dtypes/.kokoro/samples/python3.12/common.cfg b/packages/db-dtypes/.kokoro/samples/python3.12/common.cfg new file mode 100644 
index 000000000000..ad97dcc3f040 --- /dev/null +++ b/packages/db-dtypes/.kokoro/samples/python3.12/common.cfg @@ -0,0 +1,40 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.12" +} + +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-312" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-db-dtypes-pandas/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "python-db-dtypes-pandas/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/python3.12/continuous.cfg b/packages/db-dtypes/.kokoro/samples/python3.12/continuous.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/db-dtypes/.kokoro/samples/python3.12/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/python3.12/periodic-head.cfg b/packages/db-dtypes/.kokoro/samples/python3.12/periodic-head.cfg new file mode 100644 index 000000000000..ee3d56408db9 --- /dev/null +++ b/packages/db-dtypes/.kokoro/samples/python3.12/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-db-dtypes-pandas/.kokoro/test-samples-against-head.sh" +} diff --git a/packages/db-dtypes/.kokoro/samples/python3.12/periodic.cfg b/packages/db-dtypes/.kokoro/samples/python3.12/periodic.cfg new file mode 100644 index 000000000000..71cd1e597e38 --- /dev/null +++ b/packages/db-dtypes/.kokoro/samples/python3.12/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} diff --git a/packages/db-dtypes/.kokoro/samples/python3.12/presubmit.cfg b/packages/db-dtypes/.kokoro/samples/python3.12/presubmit.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/db-dtypes/.kokoro/samples/python3.12/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/db-dtypes/CONTRIBUTING.rst 
b/packages/db-dtypes/CONTRIBUTING.rst index f6d526c7ce8c..951fc7a42f85 100644 --- a/packages/db-dtypes/CONTRIBUTING.rst +++ b/packages/db-dtypes/CONTRIBUTING.rst @@ -22,7 +22,7 @@ In order to add a feature: documentation. - The feature must work fully on the following CPython versions: - 3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows. + 3.7, 3.8, 3.9, 3.10, 3.11 and 3.12 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -72,7 +72,7 @@ We use `nox `__ to instrument our tests. - To run a single unit test:: - $ nox -s unit-3.11 -- -k + $ nox -s unit-3.12 -- -k .. note:: @@ -226,12 +226,14 @@ We support: - `Python 3.9`_ - `Python 3.10`_ - `Python 3.11`_ +- `Python 3.12`_ .. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ .. _Python 3.9: https://docs.python.org/3.9/ .. _Python 3.10: https://docs.python.org/3.10/ .. _Python 3.11: https://docs.python.org/3.11/ +.. _Python 3.12: https://docs.python.org/3.12/ Supported versions can be found in our ``noxfile.py`` `config`_. diff --git a/packages/db-dtypes/noxfile.py b/packages/db-dtypes/noxfile.py index 53475f241c88..c9432dc0564a 100644 --- a/packages/db-dtypes/noxfile.py +++ b/packages/db-dtypes/noxfile.py @@ -35,7 +35,7 @@ DEFAULT_PYTHON_VERSION = "3.8" -UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11"] +UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] UNIT_TEST_STANDARD_DEPENDENCIES = [ "mock", "asyncmock", diff --git a/packages/db-dtypes/samples/snippets/noxfile.py b/packages/db-dtypes/samples/snippets/noxfile.py index 1224cbe212e4..3b7135946fd5 100644 --- a/packages/db-dtypes/samples/snippets/noxfile.py +++ b/packages/db-dtypes/samples/snippets/noxfile.py @@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. 
-ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] diff --git a/packages/db-dtypes/setup.py b/packages/db-dtypes/setup.py index cb24515d20ae..e91b109130f5 100644 --- a/packages/db-dtypes/setup.py +++ b/packages/db-dtypes/setup.py @@ -66,6 +66,8 @@ def readme(): "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Operating System :: OS Independent", "Topic :: Database :: Front-Ends", ], diff --git a/packages/db-dtypes/testing/constraints-3.12.txt b/packages/db-dtypes/testing/constraints-3.12.txt new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/db-dtypes/testing/constraints-3.6.txt b/packages/db-dtypes/testing/constraints-3.6.txt deleted file mode 100644 index a7388cdc2344..000000000000 --- a/packages/db-dtypes/testing/constraints-3.6.txt +++ /dev/null @@ -1,11 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List *all* library dependencies and extras in this file. -# Pin the version to the lower bound. -# -# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", -packaging==17.0 -# Make sure we test with pandas 0.24.2. The Python version isn't that relevant. 
-pandas==0.24.2 -pyarrow==3.0.0 -numpy==1.16.6 From 5b1d8a2d2bf55e0d2da0a1e0575ff4736a95b112 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 1 Dec 2023 06:14:52 -0500 Subject: [PATCH 132/210] chore: bump cryptography from 41.0.5 to 41.0.6 in /synthtool/gcp/templates/python_library/.kokoro (#225) Source-Link: https://github.com/googleapis/synthtool/commit/9367caadcbb30b5b2719f30eb00c44cc913550ed Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:2f155882785883336b4468d5218db737bb1d10c9cea7cb62219ad16fe248c03c Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 4 +- packages/db-dtypes/.kokoro/requirements.txt | 48 ++++++++++---------- 2 files changed, 26 insertions(+), 26 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index eb4d9f794dc1..773c1dfd2146 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:bacc3af03bff793a03add584537b36b5644342931ad989e3ba1171d3bd5399f5 -# created: 2023-11-23T18:17:28.105124211Z + digest: sha256:2f155882785883336b4468d5218db737bb1d10c9cea7cb62219ad16fe248c03c +# created: 2023-11-29T14:54:29.548172703Z diff --git a/packages/db-dtypes/.kokoro/requirements.txt b/packages/db-dtypes/.kokoro/requirements.txt index 8957e21104e2..e5c1ffca94b7 100644 --- a/packages/db-dtypes/.kokoro/requirements.txt +++ b/packages/db-dtypes/.kokoro/requirements.txt @@ -93,30 +93,30 @@ colorlog==6.7.0 \ # via # gcp-docuploader # nox -cryptography==41.0.5 \ - --hash=sha256:0c327cac00f082013c7c9fb6c46b7cc9fa3c288ca702c74773968173bda421bf \ - --hash=sha256:0d2a6a598847c46e3e321a7aef8af1436f11c27f1254933746304ff014664d84 \ - --hash=sha256:227ec057cd32a41c6651701abc0328135e472ed450f47c2766f23267b792a88e \ - --hash=sha256:22892cc830d8b2c89ea60148227631bb96a7da0c1b722f2aac8824b1b7c0b6b8 \ - --hash=sha256:392cb88b597247177172e02da6b7a63deeff1937fa6fec3bbf902ebd75d97ec7 \ - --hash=sha256:3be3ca726e1572517d2bef99a818378bbcf7d7799d5372a46c79c29eb8d166c1 \ - --hash=sha256:573eb7128cbca75f9157dcde974781209463ce56b5804983e11a1c462f0f4e88 \ - --hash=sha256:580afc7b7216deeb87a098ef0674d6ee34ab55993140838b14c9b83312b37b86 \ - --hash=sha256:5a70187954ba7292c7876734183e810b728b4f3965fbe571421cb2434d279179 \ - --hash=sha256:73801ac9736741f220e20435f84ecec75ed70eda90f781a148f1bad546963d81 \ - --hash=sha256:7d208c21e47940369accfc9e85f0de7693d9a5d843c2509b3846b2db170dfd20 \ - --hash=sha256:8254962e6ba1f4d2090c44daf50a547cd5f0bf446dc658a8e5f8156cae0d8548 \ - --hash=sha256:88417bff20162f635f24f849ab182b092697922088b477a7abd6664ddd82291d \ - --hash=sha256:a48e74dad1fb349f3dc1d449ed88e0017d792997a7ad2ec9587ed17405667e6d \ - --hash=sha256:b948e09fe5fb18517d99994184854ebd50b57248736fd4c720ad540560174ec5 \ - --hash=sha256:c707f7afd813478e2019ae32a7c49cd932dd60ab2d2a93e796f68236b7e1fbf1 \ - 
--hash=sha256:d38e6031e113b7421db1de0c1b1f7739564a88f1684c6b89234fbf6c11b75147 \ - --hash=sha256:d3977f0e276f6f5bf245c403156673db103283266601405376f075c849a0b936 \ - --hash=sha256:da6a0ff8f1016ccc7477e6339e1d50ce5f59b88905585f77193ebd5068f1e797 \ - --hash=sha256:e270c04f4d9b5671ebcc792b3ba5d4488bf7c42c3c241a3748e2599776f29696 \ - --hash=sha256:e886098619d3815e0ad5790c973afeee2c0e6e04b4da90b88e6bd06e2a0b1b72 \ - --hash=sha256:ec3b055ff8f1dce8e6ef28f626e0972981475173d7973d63f271b29c8a2897da \ - --hash=sha256:fba1e91467c65fe64a82c689dc6cf58151158993b13eb7a7f3f4b7f395636723 +cryptography==41.0.6 \ + --hash=sha256:068bc551698c234742c40049e46840843f3d98ad7ce265fd2bd4ec0d11306596 \ + --hash=sha256:0f27acb55a4e77b9be8d550d762b0513ef3fc658cd3eb15110ebbcbd626db12c \ + --hash=sha256:2132d5865eea673fe6712c2ed5fb4fa49dba10768bb4cc798345748380ee3660 \ + --hash=sha256:3288acccef021e3c3c10d58933f44e8602cf04dba96d9796d70d537bb2f4bbc4 \ + --hash=sha256:35f3f288e83c3f6f10752467c48919a7a94b7d88cc00b0668372a0d2ad4f8ead \ + --hash=sha256:398ae1fc711b5eb78e977daa3cbf47cec20f2c08c5da129b7a296055fbb22aed \ + --hash=sha256:422e3e31d63743855e43e5a6fcc8b4acab860f560f9321b0ee6269cc7ed70cc3 \ + --hash=sha256:48783b7e2bef51224020efb61b42704207dde583d7e371ef8fc2a5fb6c0aabc7 \ + --hash=sha256:4d03186af98b1c01a4eda396b137f29e4e3fb0173e30f885e27acec8823c1b09 \ + --hash=sha256:5daeb18e7886a358064a68dbcaf441c036cbdb7da52ae744e7b9207b04d3908c \ + --hash=sha256:60e746b11b937911dc70d164060d28d273e31853bb359e2b2033c9e93e6f3c43 \ + --hash=sha256:742ae5e9a2310e9dade7932f9576606836ed174da3c7d26bc3d3ab4bd49b9f65 \ + --hash=sha256:7e00fb556bda398b99b0da289ce7053639d33b572847181d6483ad89835115f6 \ + --hash=sha256:85abd057699b98fce40b41737afb234fef05c67e116f6f3650782c10862c43da \ + --hash=sha256:8efb2af8d4ba9dbc9c9dd8f04d19a7abb5b49eab1f3694e7b5a16a5fc2856f5c \ + --hash=sha256:ae236bb8760c1e55b7a39b6d4d32d2279bc6c7c8500b7d5a13b6fb9fc97be35b \ + 
--hash=sha256:afda76d84b053923c27ede5edc1ed7d53e3c9f475ebaf63c68e69f1403c405a8 \ + --hash=sha256:b27a7fd4229abef715e064269d98a7e2909ebf92eb6912a9603c7e14c181928c \ + --hash=sha256:b648fe2a45e426aaee684ddca2632f62ec4613ef362f4d681a9a6283d10e079d \ + --hash=sha256:c5a550dc7a3b50b116323e3d376241829fd326ac47bc195e04eb33a8170902a9 \ + --hash=sha256:da46e2b5df770070412c46f87bac0849b8d685c5f2679771de277a422c7d0b86 \ + --hash=sha256:f39812f70fc5c71a15aa3c97b2bbe213c3f2a460b79bd21c40d033bb34a9bf36 \ + --hash=sha256:ff369dd19e8fe0528b02e8df9f2aeb2479f89b1270d90f96a63500afe9af5cae # via # gcp-releasetool # secretstorage From 30f26c2ebbf9e8daa65a81f1dfc6735a65d13960 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 4 Dec 2023 12:41:47 -0500 Subject: [PATCH 133/210] build: treat warnings as errors (#228) * build: treat warnings as errors * ignore warnings which only appear in 3.7/3.8 * ignore warnings which only appear in 3.7/3.8 --- packages/db-dtypes/pytest.ini | 14 ++++++++++++++ 1 file changed, 14 insertions(+) create mode 100644 packages/db-dtypes/pytest.ini diff --git a/packages/db-dtypes/pytest.ini b/packages/db-dtypes/pytest.ini new file mode 100644 index 000000000000..c58342dda4bc --- /dev/null +++ b/packages/db-dtypes/pytest.ini @@ -0,0 +1,14 @@ +[pytest] +filterwarnings = + # treat all warnings as errors + error + # Remove once support for python 3.7 and 3.8 is dropped + # Ignore warnings from older versions of pandas which still have python 3.7/3.8 support + ignore:.*distutils Version classes are deprecated:DeprecationWarning + ignore:.*resolve package from __spec__ or __package__, falling back on __name__ and __path__:ImportWarning + # Remove once https://github.com/dateutil/dateutil/issues/1314 is fixed + # dateutil is a dependency of pandas + ignore:datetime.datetime.utcfromtimestamp\(\) is deprecated:DeprecationWarning:dateutil.tz.tz + # Remove once https://github.com/googleapis/python-db-dtypes-pandas/issues/227 is fixed + ignore:.*any.*with 
datetime64 dtypes is deprecated and will raise in a future version:FutureWarning + ignore:.*all.*with datetime64 dtypes is deprecated and will raise in a future version:FutureWarning From f99266eb841f3fb6802e005a23c3a57769b62bc3 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 8 Dec 2023 13:05:57 +0100 Subject: [PATCH 134/210] chore(deps): update all dependencies (#229) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- packages/db-dtypes/.github/workflows/compliance.yml | 4 ++-- packages/db-dtypes/.github/workflows/unittest-prerelease.yml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/db-dtypes/.github/workflows/compliance.yml b/packages/db-dtypes/.github/workflows/compliance.yml index d876cf3ed925..90a7c8394de3 100644 --- a/packages/db-dtypes/.github/workflows/compliance.yml +++ b/packages/db-dtypes/.github/workflows/compliance.yml @@ -13,7 +13,7 @@ jobs: - name: Checkout uses: actions/checkout@v4 - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python }} - name: Install nox @@ -34,7 +34,7 @@ jobs: - name: Checkout uses: actions/checkout@v4 - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python }} - name: Install nox diff --git a/packages/db-dtypes/.github/workflows/unittest-prerelease.yml b/packages/db-dtypes/.github/workflows/unittest-prerelease.yml index 86d241e732d6..05787129de4c 100644 --- a/packages/db-dtypes/.github/workflows/unittest-prerelease.yml +++ b/packages/db-dtypes/.github/workflows/unittest-prerelease.yml @@ -13,7 +13,7 @@ jobs: - name: Checkout uses: actions/checkout@v4 - name: Setup Python - uses: actions/setup-python@v4 + uses: 
actions/setup-python@v5 with: python-version: ${{ matrix.python }} - name: Install nox From e6498c1527c68fbc463298bc02fe44135b83f78c Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sun, 10 Dec 2023 09:01:31 -0500 Subject: [PATCH 135/210] build: update actions/checkout and actions/setup-python (#231) Source-Link: https://github.com/googleapis/synthtool/commit/3551acd1261fd8f616cbfd054cda9bd6d6ac75f4 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:230f7fe8a0d2ed81a519cfc15c6bb11c5b46b9fb449b8b1219b3771bcb520ad2 Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 4 ++-- packages/db-dtypes/.github/workflows/docs.yml | 8 ++++---- packages/db-dtypes/.github/workflows/lint.yml | 4 ++-- packages/db-dtypes/.github/workflows/unittest.yml | 8 ++++---- 4 files changed, 12 insertions(+), 12 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 773c1dfd2146..40bf99731959 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:2f155882785883336b4468d5218db737bb1d10c9cea7cb62219ad16fe248c03c -# created: 2023-11-29T14:54:29.548172703Z + digest: sha256:230f7fe8a0d2ed81a519cfc15c6bb11c5b46b9fb449b8b1219b3771bcb520ad2 +# created: 2023-12-09T15:16:25.430769578Z diff --git a/packages/db-dtypes/.github/workflows/docs.yml b/packages/db-dtypes/.github/workflows/docs.yml index 221806cedf58..698fbc5c94da 100644 --- a/packages/db-dtypes/.github/workflows/docs.yml +++ b/packages/db-dtypes/.github/workflows/docs.yml @@ -8,9 +8,9 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.9" - name: Install nox @@ -24,9 +24,9 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.10" - name: Install nox diff --git a/packages/db-dtypes/.github/workflows/lint.yml b/packages/db-dtypes/.github/workflows/lint.yml index 16d5a9e90f6d..4866193af2a9 100644 --- a/packages/db-dtypes/.github/workflows/lint.yml +++ b/packages/db-dtypes/.github/workflows/lint.yml @@ -8,9 +8,9 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.8" - name: Install nox diff --git a/packages/db-dtypes/.github/workflows/unittest.yml b/packages/db-dtypes/.github/workflows/unittest.yml index a32027b49bc2..d6ca65627c2d 100644 --- a/packages/db-dtypes/.github/workflows/unittest.yml +++ b/packages/db-dtypes/.github/workflows/unittest.yml @@ -11,9 +11,9 @@ jobs: python: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12'] steps: - name: Checkout - uses: actions/checkout@v3 
+ uses: actions/checkout@v4 - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python }} - name: Install nox @@ -37,9 +37,9 @@ jobs: - unit steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.8" - name: Install coverage From b1ac77cbda2c2fc5321862d17f8e1ad534f5d269 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 11 Dec 2023 14:59:48 -0800 Subject: [PATCH 136/210] chore(main): release 1.2.0 (#190) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/db-dtypes/CHANGELOG.md | 17 +++++++++++++++++ packages/db-dtypes/db_dtypes/version.py | 2 +- 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/packages/db-dtypes/CHANGELOG.md b/packages/db-dtypes/CHANGELOG.md index 431801f42ed1..3c48e7b334ee 100644 --- a/packages/db-dtypes/CHANGELOG.md +++ b/packages/db-dtypes/CHANGELOG.md @@ -1,5 +1,22 @@ # Changelog +## [1.2.0](https://github.com/googleapis/python-db-dtypes-pandas/compare/v1.1.1...v1.2.0) (2023-12-10) + + +### Features + +* Add support for Python 3.12 ([#223](https://github.com/googleapis/python-db-dtypes-pandas/issues/223)) ([1338425](https://github.com/googleapis/python-db-dtypes-pandas/commit/1338425ad765be4613bcf3fcfa7f6ce964de04a3)) + + +### Bug Fixes + +* Adds xfail marks to tests that are known to fail ([#189](https://github.com/googleapis/python-db-dtypes-pandas/issues/189)) ([4a56b76](https://github.com/googleapis/python-db-dtypes-pandas/commit/4a56b766b0ccba900a555167863f1081a76c4c0d)) + + +### Documentation + +* Update pandas extension link ([#210](https://github.com/googleapis/python-db-dtypes-pandas/issues/210)) 
([668988f](https://github.com/googleapis/python-db-dtypes-pandas/commit/668988f0f1c25a9d50a7ad5523933e42553b5210)) + ## [1.1.1](https://github.com/googleapis/python-db-dtypes-pandas/compare/v1.1.0...v1.1.1) (2023-03-30) diff --git a/packages/db-dtypes/db_dtypes/version.py b/packages/db-dtypes/db_dtypes/version.py index 7494067f71b8..82681053c986 100644 --- a/packages/db-dtypes/db_dtypes/version.py +++ b/packages/db-dtypes/db_dtypes/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "1.1.1" +__version__ = "1.2.0" From db594fc261895126dc0a18ad6f46499d797c21ba Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 14 Dec 2023 19:18:37 -0500 Subject: [PATCH 137/210] build: update actions/upload-artifact and actions/download-artifact (#233) Source-Link: https://github.com/googleapis/synthtool/commit/280ddaed417057dfe5b1395731de07b7d09f5058 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:346ab2efb51649c5dde7756cbbdc60dd394852ba83b9bbffc292a63549f33c17 Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 4 ++-- packages/db-dtypes/.github/workflows/unittest.yml | 10 +++++----- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 40bf99731959..9bee24097165 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:230f7fe8a0d2ed81a519cfc15c6bb11c5b46b9fb449b8b1219b3771bcb520ad2 -# created: 2023-12-09T15:16:25.430769578Z + digest: sha256:346ab2efb51649c5dde7756cbbdc60dd394852ba83b9bbffc292a63549f33c17 +# created: 2023-12-14T22:17:57.611773021Z diff --git a/packages/db-dtypes/.github/workflows/unittest.yml b/packages/db-dtypes/.github/workflows/unittest.yml index d6ca65627c2d..f4a337c496a0 100644 --- a/packages/db-dtypes/.github/workflows/unittest.yml +++ b/packages/db-dtypes/.github/workflows/unittest.yml @@ -26,9 +26,9 @@ jobs: run: | nox -s unit-${{ matrix.python }} - name: Upload coverage results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: - name: coverage-artifacts + name: coverage-artifact-${{ matrix.python }} path: .coverage-${{ matrix.python }} cover: @@ -47,11 +47,11 @@ jobs: python -m pip install --upgrade setuptools pip wheel python -m pip install coverage - name: Download coverage results - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: - name: coverage-artifacts path: .coverage-results/ - name: Report coverage results run: | - coverage combine .coverage-results/.coverage* + find .coverage-results -type f -name '*.zip' -exec unzip {} \; + coverage combine .coverage-results/**/.coverage* coverage report --show-missing --fail-under=100 From 44100dc281981db6aba4b8d28c74b5bb9810aa99 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 15 Dec 2023 01:23:01 +0100 Subject: [PATCH 138/210] chore(deps): update all dependencies to v4 (#232) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies to v4 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- packages/db-dtypes/.github/workflows/unittest-prerelease.yml | 2 +- 1 file changed, 
1 insertion(+), 1 deletion(-) diff --git a/packages/db-dtypes/.github/workflows/unittest-prerelease.yml b/packages/db-dtypes/.github/workflows/unittest-prerelease.yml index 05787129de4c..9d30c755af17 100644 --- a/packages/db-dtypes/.github/workflows/unittest-prerelease.yml +++ b/packages/db-dtypes/.github/workflows/unittest-prerelease.yml @@ -26,7 +26,7 @@ jobs: run: | nox -s unit_prerelease - name: Upload coverage results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: coverage-artifacts path: .coverage-${{ matrix.python }} From 2712ae3ded527058b916f0d5bbada065afc60253 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 16 Jan 2024 12:46:09 -0800 Subject: [PATCH 139/210] build(python): fix `docs` and `docfx` builds (#237) Source-Link: https://github.com/googleapis/synthtool/commit/fac8444edd5f5526e804c306b766a271772a3e2f Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:5ea6d0ab82c956b50962f91d94e206d3921537ae5fe1549ec5326381d8905cfa Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 6 +++--- packages/db-dtypes/.kokoro/requirements.txt | 6 +++--- packages/db-dtypes/noxfile.py | 20 +++++++++++++++++++- 3 files changed, 25 insertions(+), 7 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 9bee24097165..d8a1bbca7179 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:346ab2efb51649c5dde7756cbbdc60dd394852ba83b9bbffc292a63549f33c17 -# created: 2023-12-14T22:17:57.611773021Z + digest: sha256:5ea6d0ab82c956b50962f91d94e206d3921537ae5fe1549ec5326381d8905cfa +# created: 2024-01-15T16:32:08.142785673Z diff --git a/packages/db-dtypes/.kokoro/requirements.txt b/packages/db-dtypes/.kokoro/requirements.txt index e5c1ffca94b7..bb3d6ca38b14 100644 --- a/packages/db-dtypes/.kokoro/requirements.txt +++ b/packages/db-dtypes/.kokoro/requirements.txt @@ -263,9 +263,9 @@ jeepney==0.8.0 \ # via # keyring # secretstorage -jinja2==3.1.2 \ - --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ - --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 +jinja2==3.1.3 \ + --hash=sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa \ + --hash=sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90 # via gcp-releasetool keyring==24.2.0 \ --hash=sha256:4901caaf597bfd3bbd78c9a0c7c4c29fcd8310dab2cffefe749e916b6527acd6 \ diff --git a/packages/db-dtypes/noxfile.py b/packages/db-dtypes/noxfile.py index c9432dc0564a..36c65540292d 100644 --- a/packages/db-dtypes/noxfile.py +++ b/packages/db-dtypes/noxfile.py @@ -378,7 +378,16 @@ def docs(session): session.install("-e", ".") session.install( - "sphinx==4.0.1", + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", "alabaster", "recommonmark", ) @@ -404,6 +413,15 @@ def docfx(session): session.install("-e", ".") session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", "gcp-sphinx-docfx-yaml", "alabaster", "recommonmark", From 5c38bd7c9ebbdbc7ce12965b1697921d66ee6a64 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 19 Jan 2024 20:35:22 +0100 Subject: [PATCH 140/210] chore(deps): update dependency pytest to v7.4.4 (#235) Co-authored-by: Lingqing Gan --- packages/db-dtypes/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/db-dtypes/samples/snippets/requirements-test.txt b/packages/db-dtypes/samples/snippets/requirements-test.txt index f9708e4b7cf1..cb87efc0ff71 100644 --- a/packages/db-dtypes/samples/snippets/requirements-test.txt +++ b/packages/db-dtypes/samples/snippets/requirements-test.txt @@ -1 +1 @@ -pytest==7.4.3 +pytest==7.4.4 From 8134a7049c9be839d0246761d61ae5e195388487 Mon Sep 17 00:00:00 2001 From: Lingqing Gan Date: Fri, 19 Jan 2024 12:08:26 -0800 Subject: [PATCH 141/210] test: avoid prerelease test with pandas==2.2.0rc0 (#238) Co-authored-by: Chalmer Lowe --- packages/db-dtypes/noxfile.py | 5 ++++- packages/db-dtypes/owlbot.py | 5 ++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/packages/db-dtypes/noxfile.py b/packages/db-dtypes/noxfile.py index 36c65540292d..102670a8574b 100644 --- 
a/packages/db-dtypes/noxfile.py +++ b/packages/db-dtypes/noxfile.py @@ -201,13 +201,16 @@ def prerelease(session, tests_path): "--upgrade", "pyarrow", ) + # Avoid pandas==2.2.0rc0 as this version causes PyArrow to fail. Once newer + # prerelease comes out, this constraint can be removed. See + # https://github.com/googleapis/python-db-dtypes-pandas/issues/234 session.install( "--extra-index-url", "https://pypi.anaconda.org/scipy-wheels-nightly/simple", "--prefer-binary", "--pre", "--upgrade", - "pandas", + "pandas!=2.2.0rc0", ) session.install( "mock", diff --git a/packages/db-dtypes/owlbot.py b/packages/db-dtypes/owlbot.py index 4b89096dfad5..d1b3c088775f 100644 --- a/packages/db-dtypes/owlbot.py +++ b/packages/db-dtypes/owlbot.py @@ -109,13 +109,16 @@ def prerelease(session, tests_path): "--upgrade", "pyarrow", ) + # Avoid pandas==2.2.0rc0 as this version causes PyArrow to fail. Once newer + # prerelease comes out, this constraint can be removed. See + # https://github.com/googleapis/python-db-dtypes-pandas/issues/234 session.install( "--extra-index-url", "https://pypi.anaconda.org/scipy-wheels-nightly/simple", "--prefer-binary", "--pre", "--upgrade", - "pandas", + "pandas!=2.2.0rc0", ) session.install( "mock", From 5b179109d92edb14609351ff2d8fc035e2604780 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 5 Feb 2024 22:22:59 +0100 Subject: [PATCH 142/210] chore(deps): update dependency pytest to v8 (#239) * chore(deps): update dependency pytest to v8 * pin pytest == 7.4.4 for python 3.7 --------- Co-authored-by: Lingqing Gan --- packages/db-dtypes/samples/snippets/requirements-test.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/db-dtypes/samples/snippets/requirements-test.txt b/packages/db-dtypes/samples/snippets/requirements-test.txt index cb87efc0ff71..2f37938b85df 100644 --- a/packages/db-dtypes/samples/snippets/requirements-test.txt +++ b/packages/db-dtypes/samples/snippets/requirements-test.txt @@ -1 +1,2 @@ 
-pytest==7.4.4 +pytest==7.4.4; python_version == '3.7' +pytest==8.0.0; python_version > '3.7' From 49aa246fc2a6f80fa3d54d99280b06a4168f2f55 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 7 Feb 2024 10:22:26 -0800 Subject: [PATCH 143/210] build(deps): bump cryptography from 41.0.6 to 42.0.0 in /synthtool/gcp/templates/python_library/.kokoro (#242) Source-Link: https://github.com/googleapis/synthtool/commit/e13b22b1f660c80e4c3e735a9177d2f16c4b8bdc Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:97b671488ad548ef783a452a9e1276ac10f144d5ae56d98cc4bf77ba504082b4 Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 4 +- packages/db-dtypes/.kokoro/requirements.txt | 57 +++++++++++--------- 2 files changed, 35 insertions(+), 26 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index d8a1bbca7179..2aefd0e91175 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:5ea6d0ab82c956b50962f91d94e206d3921537ae5fe1549ec5326381d8905cfa -# created: 2024-01-15T16:32:08.142785673Z + digest: sha256:97b671488ad548ef783a452a9e1276ac10f144d5ae56d98cc4bf77ba504082b4 +# created: 2024-02-06T03:20:16.660474034Z diff --git a/packages/db-dtypes/.kokoro/requirements.txt b/packages/db-dtypes/.kokoro/requirements.txt index bb3d6ca38b14..8c11c9f3e9b6 100644 --- a/packages/db-dtypes/.kokoro/requirements.txt +++ b/packages/db-dtypes/.kokoro/requirements.txt @@ -93,30 +93,39 @@ colorlog==6.7.0 \ # via # gcp-docuploader # nox -cryptography==41.0.6 \ - --hash=sha256:068bc551698c234742c40049e46840843f3d98ad7ce265fd2bd4ec0d11306596 \ - --hash=sha256:0f27acb55a4e77b9be8d550d762b0513ef3fc658cd3eb15110ebbcbd626db12c \ - --hash=sha256:2132d5865eea673fe6712c2ed5fb4fa49dba10768bb4cc798345748380ee3660 \ - --hash=sha256:3288acccef021e3c3c10d58933f44e8602cf04dba96d9796d70d537bb2f4bbc4 \ - --hash=sha256:35f3f288e83c3f6f10752467c48919a7a94b7d88cc00b0668372a0d2ad4f8ead \ - --hash=sha256:398ae1fc711b5eb78e977daa3cbf47cec20f2c08c5da129b7a296055fbb22aed \ - --hash=sha256:422e3e31d63743855e43e5a6fcc8b4acab860f560f9321b0ee6269cc7ed70cc3 \ - --hash=sha256:48783b7e2bef51224020efb61b42704207dde583d7e371ef8fc2a5fb6c0aabc7 \ - --hash=sha256:4d03186af98b1c01a4eda396b137f29e4e3fb0173e30f885e27acec8823c1b09 \ - --hash=sha256:5daeb18e7886a358064a68dbcaf441c036cbdb7da52ae744e7b9207b04d3908c \ - --hash=sha256:60e746b11b937911dc70d164060d28d273e31853bb359e2b2033c9e93e6f3c43 \ - --hash=sha256:742ae5e9a2310e9dade7932f9576606836ed174da3c7d26bc3d3ab4bd49b9f65 \ - --hash=sha256:7e00fb556bda398b99b0da289ce7053639d33b572847181d6483ad89835115f6 \ - --hash=sha256:85abd057699b98fce40b41737afb234fef05c67e116f6f3650782c10862c43da \ - --hash=sha256:8efb2af8d4ba9dbc9c9dd8f04d19a7abb5b49eab1f3694e7b5a16a5fc2856f5c \ - --hash=sha256:ae236bb8760c1e55b7a39b6d4d32d2279bc6c7c8500b7d5a13b6fb9fc97be35b \ - 
--hash=sha256:afda76d84b053923c27ede5edc1ed7d53e3c9f475ebaf63c68e69f1403c405a8 \ - --hash=sha256:b27a7fd4229abef715e064269d98a7e2909ebf92eb6912a9603c7e14c181928c \ - --hash=sha256:b648fe2a45e426aaee684ddca2632f62ec4613ef362f4d681a9a6283d10e079d \ - --hash=sha256:c5a550dc7a3b50b116323e3d376241829fd326ac47bc195e04eb33a8170902a9 \ - --hash=sha256:da46e2b5df770070412c46f87bac0849b8d685c5f2679771de277a422c7d0b86 \ - --hash=sha256:f39812f70fc5c71a15aa3c97b2bbe213c3f2a460b79bd21c40d033bb34a9bf36 \ - --hash=sha256:ff369dd19e8fe0528b02e8df9f2aeb2479f89b1270d90f96a63500afe9af5cae +cryptography==42.0.0 \ + --hash=sha256:0a68bfcf57a6887818307600c3c0ebc3f62fbb6ccad2240aa21887cda1f8df1b \ + --hash=sha256:146e971e92a6dd042214b537a726c9750496128453146ab0ee8971a0299dc9bd \ + --hash=sha256:14e4b909373bc5bf1095311fa0f7fcabf2d1a160ca13f1e9e467be1ac4cbdf94 \ + --hash=sha256:206aaf42e031b93f86ad60f9f5d9da1b09164f25488238ac1dc488334eb5e221 \ + --hash=sha256:3005166a39b70c8b94455fdbe78d87a444da31ff70de3331cdec2c568cf25b7e \ + --hash=sha256:324721d93b998cb7367f1e6897370644751e5580ff9b370c0a50dc60a2003513 \ + --hash=sha256:33588310b5c886dfb87dba5f013b8d27df7ffd31dc753775342a1e5ab139e59d \ + --hash=sha256:35cf6ed4c38f054478a9df14f03c1169bb14bd98f0b1705751079b25e1cb58bc \ + --hash=sha256:3ca482ea80626048975360c8e62be3ceb0f11803180b73163acd24bf014133a0 \ + --hash=sha256:56ce0c106d5c3fec1038c3cca3d55ac320a5be1b44bf15116732d0bc716979a2 \ + --hash=sha256:5a217bca51f3b91971400890905a9323ad805838ca3fa1e202a01844f485ee87 \ + --hash=sha256:678cfa0d1e72ef41d48993a7be75a76b0725d29b820ff3cfd606a5b2b33fda01 \ + --hash=sha256:69fd009a325cad6fbfd5b04c711a4da563c6c4854fc4c9544bff3088387c77c0 \ + --hash=sha256:6cf9b76d6e93c62114bd19485e5cb003115c134cf9ce91f8ac924c44f8c8c3f4 \ + --hash=sha256:74f18a4c8ca04134d2052a140322002fef535c99cdbc2a6afc18a8024d5c9d5b \ + --hash=sha256:85f759ed59ffd1d0baad296e72780aa62ff8a71f94dc1ab340386a1207d0ea81 \ + 
--hash=sha256:87086eae86a700307b544625e3ba11cc600c3c0ef8ab97b0fda0705d6db3d4e3 \ + --hash=sha256:8814722cffcfd1fbd91edd9f3451b88a8f26a5fd41b28c1c9193949d1c689dc4 \ + --hash=sha256:8fedec73d590fd30c4e3f0d0f4bc961aeca8390c72f3eaa1a0874d180e868ddf \ + --hash=sha256:9515ea7f596c8092fdc9902627e51b23a75daa2c7815ed5aa8cf4f07469212ec \ + --hash=sha256:988b738f56c665366b1e4bfd9045c3efae89ee366ca3839cd5af53eaa1401bce \ + --hash=sha256:a2a8d873667e4fd2f34aedab02ba500b824692c6542e017075a2efc38f60a4c0 \ + --hash=sha256:bd7cf7a8d9f34cc67220f1195884151426ce616fdc8285df9054bfa10135925f \ + --hash=sha256:bdce70e562c69bb089523e75ef1d9625b7417c6297a76ac27b1b8b1eb51b7d0f \ + --hash=sha256:be14b31eb3a293fc6e6aa2807c8a3224c71426f7c4e3639ccf1a2f3ffd6df8c3 \ + --hash=sha256:be41b0c7366e5549265adf2145135dca107718fa44b6e418dc7499cfff6b4689 \ + --hash=sha256:c310767268d88803b653fffe6d6f2f17bb9d49ffceb8d70aed50ad45ea49ab08 \ + --hash=sha256:c58115384bdcfe9c7f644c72f10f6f42bed7cf59f7b52fe1bf7ae0a622b3a139 \ + --hash=sha256:c640b0ef54138fde761ec99a6c7dc4ce05e80420262c20fa239e694ca371d434 \ + --hash=sha256:ca20550bb590db16223eb9ccc5852335b48b8f597e2f6f0878bbfd9e7314eb17 \ + --hash=sha256:d97aae66b7de41cdf5b12087b5509e4e9805ed6f562406dfcf60e8481a9a28f8 \ + --hash=sha256:e9326ca78111e4c645f7e49cbce4ed2f3f85e17b61a563328c85a5208cf34440 # via # gcp-releasetool # secretstorage From dfc4269448fd9e6374ce8570cb0ea1167a31809f Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 20 Feb 2024 12:48:22 -0800 Subject: [PATCH 144/210] build(deps): bump cryptography from 42.0.0 to 42.0.2 in .kokoro (#245) Source-Link: https://github.com/googleapis/synthtool/commit/8d392a55db44b00b4a9b995318051e334eecdcf1 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:a0c4463fcfd9893fc172a3b3db2b6ac0c7b94ec6ad458c7dcea12d9693615ac3 Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 4 +- 
packages/db-dtypes/.kokoro/requirements.txt | 66 ++++++++++---------- 2 files changed, 35 insertions(+), 35 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 2aefd0e91175..51213ca00ee3 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:97b671488ad548ef783a452a9e1276ac10f144d5ae56d98cc4bf77ba504082b4 -# created: 2024-02-06T03:20:16.660474034Z + digest: sha256:a0c4463fcfd9893fc172a3b3db2b6ac0c7b94ec6ad458c7dcea12d9693615ac3 +# created: 2024-02-17T12:21:23.177926195Z diff --git a/packages/db-dtypes/.kokoro/requirements.txt b/packages/db-dtypes/.kokoro/requirements.txt index 8c11c9f3e9b6..f80bdcd62981 100644 --- a/packages/db-dtypes/.kokoro/requirements.txt +++ b/packages/db-dtypes/.kokoro/requirements.txt @@ -93,39 +93,39 @@ colorlog==6.7.0 \ # via # gcp-docuploader # nox -cryptography==42.0.0 \ - --hash=sha256:0a68bfcf57a6887818307600c3c0ebc3f62fbb6ccad2240aa21887cda1f8df1b \ - --hash=sha256:146e971e92a6dd042214b537a726c9750496128453146ab0ee8971a0299dc9bd \ - --hash=sha256:14e4b909373bc5bf1095311fa0f7fcabf2d1a160ca13f1e9e467be1ac4cbdf94 \ - --hash=sha256:206aaf42e031b93f86ad60f9f5d9da1b09164f25488238ac1dc488334eb5e221 \ - --hash=sha256:3005166a39b70c8b94455fdbe78d87a444da31ff70de3331cdec2c568cf25b7e \ - --hash=sha256:324721d93b998cb7367f1e6897370644751e5580ff9b370c0a50dc60a2003513 \ - --hash=sha256:33588310b5c886dfb87dba5f013b8d27df7ffd31dc753775342a1e5ab139e59d \ - --hash=sha256:35cf6ed4c38f054478a9df14f03c1169bb14bd98f0b1705751079b25e1cb58bc \ - --hash=sha256:3ca482ea80626048975360c8e62be3ceb0f11803180b73163acd24bf014133a0 \ - --hash=sha256:56ce0c106d5c3fec1038c3cca3d55ac320a5be1b44bf15116732d0bc716979a2 \ - --hash=sha256:5a217bca51f3b91971400890905a9323ad805838ca3fa1e202a01844f485ee87 \ - 
--hash=sha256:678cfa0d1e72ef41d48993a7be75a76b0725d29b820ff3cfd606a5b2b33fda01 \ - --hash=sha256:69fd009a325cad6fbfd5b04c711a4da563c6c4854fc4c9544bff3088387c77c0 \ - --hash=sha256:6cf9b76d6e93c62114bd19485e5cb003115c134cf9ce91f8ac924c44f8c8c3f4 \ - --hash=sha256:74f18a4c8ca04134d2052a140322002fef535c99cdbc2a6afc18a8024d5c9d5b \ - --hash=sha256:85f759ed59ffd1d0baad296e72780aa62ff8a71f94dc1ab340386a1207d0ea81 \ - --hash=sha256:87086eae86a700307b544625e3ba11cc600c3c0ef8ab97b0fda0705d6db3d4e3 \ - --hash=sha256:8814722cffcfd1fbd91edd9f3451b88a8f26a5fd41b28c1c9193949d1c689dc4 \ - --hash=sha256:8fedec73d590fd30c4e3f0d0f4bc961aeca8390c72f3eaa1a0874d180e868ddf \ - --hash=sha256:9515ea7f596c8092fdc9902627e51b23a75daa2c7815ed5aa8cf4f07469212ec \ - --hash=sha256:988b738f56c665366b1e4bfd9045c3efae89ee366ca3839cd5af53eaa1401bce \ - --hash=sha256:a2a8d873667e4fd2f34aedab02ba500b824692c6542e017075a2efc38f60a4c0 \ - --hash=sha256:bd7cf7a8d9f34cc67220f1195884151426ce616fdc8285df9054bfa10135925f \ - --hash=sha256:bdce70e562c69bb089523e75ef1d9625b7417c6297a76ac27b1b8b1eb51b7d0f \ - --hash=sha256:be14b31eb3a293fc6e6aa2807c8a3224c71426f7c4e3639ccf1a2f3ffd6df8c3 \ - --hash=sha256:be41b0c7366e5549265adf2145135dca107718fa44b6e418dc7499cfff6b4689 \ - --hash=sha256:c310767268d88803b653fffe6d6f2f17bb9d49ffceb8d70aed50ad45ea49ab08 \ - --hash=sha256:c58115384bdcfe9c7f644c72f10f6f42bed7cf59f7b52fe1bf7ae0a622b3a139 \ - --hash=sha256:c640b0ef54138fde761ec99a6c7dc4ce05e80420262c20fa239e694ca371d434 \ - --hash=sha256:ca20550bb590db16223eb9ccc5852335b48b8f597e2f6f0878bbfd9e7314eb17 \ - --hash=sha256:d97aae66b7de41cdf5b12087b5509e4e9805ed6f562406dfcf60e8481a9a28f8 \ - --hash=sha256:e9326ca78111e4c645f7e49cbce4ed2f3f85e17b61a563328c85a5208cf34440 +cryptography==42.0.2 \ + --hash=sha256:087887e55e0b9c8724cf05361357875adb5c20dec27e5816b653492980d20380 \ + --hash=sha256:09a77e5b2e8ca732a19a90c5bca2d124621a1edb5438c5daa2d2738bfeb02589 \ + 
--hash=sha256:130c0f77022b2b9c99d8cebcdd834d81705f61c68e91ddd614ce74c657f8b3ea \ + --hash=sha256:141e2aa5ba100d3788c0ad7919b288f89d1fe015878b9659b307c9ef867d3a65 \ + --hash=sha256:28cb2c41f131a5758d6ba6a0504150d644054fd9f3203a1e8e8d7ac3aea7f73a \ + --hash=sha256:2f9f14185962e6a04ab32d1abe34eae8a9001569ee4edb64d2304bf0d65c53f3 \ + --hash=sha256:320948ab49883557a256eab46149df79435a22d2fefd6a66fe6946f1b9d9d008 \ + --hash=sha256:36d4b7c4be6411f58f60d9ce555a73df8406d484ba12a63549c88bd64f7967f1 \ + --hash=sha256:3b15c678f27d66d247132cbf13df2f75255627bcc9b6a570f7d2fd08e8c081d2 \ + --hash=sha256:3dbd37e14ce795b4af61b89b037d4bc157f2cb23e676fa16932185a04dfbf635 \ + --hash=sha256:4383b47f45b14459cab66048d384614019965ba6c1a1a141f11b5a551cace1b2 \ + --hash=sha256:44c95c0e96b3cb628e8452ec060413a49002a247b2b9938989e23a2c8291fc90 \ + --hash=sha256:4b063d3413f853e056161eb0c7724822a9740ad3caa24b8424d776cebf98e7ee \ + --hash=sha256:52ed9ebf8ac602385126c9a2fe951db36f2cb0c2538d22971487f89d0de4065a \ + --hash=sha256:55d1580e2d7e17f45d19d3b12098e352f3a37fe86d380bf45846ef257054b242 \ + --hash=sha256:5ef9bc3d046ce83c4bbf4c25e1e0547b9c441c01d30922d812e887dc5f125c12 \ + --hash=sha256:5fa82a26f92871eca593b53359c12ad7949772462f887c35edaf36f87953c0e2 \ + --hash=sha256:61321672b3ac7aade25c40449ccedbc6db72c7f5f0fdf34def5e2f8b51ca530d \ + --hash=sha256:701171f825dcab90969596ce2af253143b93b08f1a716d4b2a9d2db5084ef7be \ + --hash=sha256:841ec8af7a8491ac76ec5a9522226e287187a3107e12b7d686ad354bb78facee \ + --hash=sha256:8a06641fb07d4e8f6c7dda4fc3f8871d327803ab6542e33831c7ccfdcb4d0ad6 \ + --hash=sha256:8e88bb9eafbf6a4014d55fb222e7360eef53e613215085e65a13290577394529 \ + --hash=sha256:a00aee5d1b6c20620161984f8ab2ab69134466c51f58c052c11b076715e72929 \ + --hash=sha256:a047682d324ba56e61b7ea7c7299d51e61fd3bca7dad2ccc39b72bd0118d60a1 \ + --hash=sha256:a7ef8dd0bf2e1d0a27042b231a3baac6883cdd5557036f5e8df7139255feaac6 \ + --hash=sha256:ad28cff53f60d99a928dfcf1e861e0b2ceb2bc1f08a074fdd601b314e1cc9e0a \ + 
--hash=sha256:b9097a208875fc7bbeb1286d0125d90bdfed961f61f214d3f5be62cd4ed8a446 \ + --hash=sha256:b97fe7d7991c25e6a31e5d5e795986b18fbbb3107b873d5f3ae6dc9a103278e9 \ + --hash=sha256:e0ec52ba3c7f1b7d813cd52649a5b3ef1fc0d433219dc8c93827c57eab6cf888 \ + --hash=sha256:ea2c3ffb662fec8bbbfce5602e2c159ff097a4631d96235fcf0fb00e59e3ece4 \ + --hash=sha256:fa3dec4ba8fb6e662770b74f62f1a0c7d4e37e25b58b2bf2c1be4c95372b4a33 \ + --hash=sha256:fbeb725c9dc799a574518109336acccaf1303c30d45c075c665c0793c2f79a7f # via # gcp-releasetool # secretstorage From 0a9ec550d58da30f7660162a238eeae629e787c4 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 20 Feb 2024 21:58:24 +0100 Subject: [PATCH 145/210] chore(deps): update dependency pytest to v8.0.1 (#243) * chore(deps): update dependency pytest to v8.0.1 * pin pytest version for python==3.7 --------- Co-authored-by: Lingqing Gan --- packages/db-dtypes/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/db-dtypes/samples/snippets/requirements-test.txt b/packages/db-dtypes/samples/snippets/requirements-test.txt index 2f37938b85df..ac846aabcedc 100644 --- a/packages/db-dtypes/samples/snippets/requirements-test.txt +++ b/packages/db-dtypes/samples/snippets/requirements-test.txt @@ -1,2 +1,2 @@ pytest==7.4.4; python_version == '3.7' -pytest==8.0.0; python_version > '3.7' +pytest==8.0.1; python_version > '3.7' From 1f5dc5390e79cd0344fd9bb62bec1692164afd1f Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 26 Feb 2024 22:11:57 +0100 Subject: [PATCH 146/210] chore(deps): update dependency pytest to v8.0.2 (#247) * chore(deps): update dependency pytest to v8.0.2 * pin pytest==7.4.4 for python 3.7 --------- Co-authored-by: Lingqing Gan --- packages/db-dtypes/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/db-dtypes/samples/snippets/requirements-test.txt b/packages/db-dtypes/samples/snippets/requirements-test.txt 
index ac846aabcedc..4f8692e74482 100644 --- a/packages/db-dtypes/samples/snippets/requirements-test.txt +++ b/packages/db-dtypes/samples/snippets/requirements-test.txt @@ -1,2 +1,2 @@ pytest==7.4.4; python_version == '3.7' -pytest==8.0.1; python_version > '3.7' +pytest==8.0.2; python_version > '3.7' From a90965cf5430b5acbbb4bbf9c5049bcefb2b9743 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 27 Feb 2024 13:54:52 -0800 Subject: [PATCH 147/210] build(deps): bump cryptography from 42.0.2 to 42.0.4 in .kokoro (#248) Source-Link: https://github.com/googleapis/synthtool/commit/d895aec3679ad22aa120481f746bf9f2f325f26f Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:98f3afd11308259de6e828e37376d18867fd321aba07826e29e4f8d9cab56bad Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 4 +- packages/db-dtypes/.kokoro/requirements.txt | 66 ++++++++++---------- 2 files changed, 35 insertions(+), 35 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 51213ca00ee3..e4e943e0259a 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:a0c4463fcfd9893fc172a3b3db2b6ac0c7b94ec6ad458c7dcea12d9693615ac3 -# created: 2024-02-17T12:21:23.177926195Z + digest: sha256:98f3afd11308259de6e828e37376d18867fd321aba07826e29e4f8d9cab56bad +# created: 2024-02-27T15:56:18.442440378Z diff --git a/packages/db-dtypes/.kokoro/requirements.txt b/packages/db-dtypes/.kokoro/requirements.txt index f80bdcd62981..bda8e38c4f31 100644 --- a/packages/db-dtypes/.kokoro/requirements.txt +++ b/packages/db-dtypes/.kokoro/requirements.txt @@ -93,39 +93,39 @@ colorlog==6.7.0 \ # via # gcp-docuploader # nox -cryptography==42.0.2 \ - --hash=sha256:087887e55e0b9c8724cf05361357875adb5c20dec27e5816b653492980d20380 \ - --hash=sha256:09a77e5b2e8ca732a19a90c5bca2d124621a1edb5438c5daa2d2738bfeb02589 \ - --hash=sha256:130c0f77022b2b9c99d8cebcdd834d81705f61c68e91ddd614ce74c657f8b3ea \ - --hash=sha256:141e2aa5ba100d3788c0ad7919b288f89d1fe015878b9659b307c9ef867d3a65 \ - --hash=sha256:28cb2c41f131a5758d6ba6a0504150d644054fd9f3203a1e8e8d7ac3aea7f73a \ - --hash=sha256:2f9f14185962e6a04ab32d1abe34eae8a9001569ee4edb64d2304bf0d65c53f3 \ - --hash=sha256:320948ab49883557a256eab46149df79435a22d2fefd6a66fe6946f1b9d9d008 \ - --hash=sha256:36d4b7c4be6411f58f60d9ce555a73df8406d484ba12a63549c88bd64f7967f1 \ - --hash=sha256:3b15c678f27d66d247132cbf13df2f75255627bcc9b6a570f7d2fd08e8c081d2 \ - --hash=sha256:3dbd37e14ce795b4af61b89b037d4bc157f2cb23e676fa16932185a04dfbf635 \ - --hash=sha256:4383b47f45b14459cab66048d384614019965ba6c1a1a141f11b5a551cace1b2 \ - --hash=sha256:44c95c0e96b3cb628e8452ec060413a49002a247b2b9938989e23a2c8291fc90 \ - --hash=sha256:4b063d3413f853e056161eb0c7724822a9740ad3caa24b8424d776cebf98e7ee \ - --hash=sha256:52ed9ebf8ac602385126c9a2fe951db36f2cb0c2538d22971487f89d0de4065a \ - --hash=sha256:55d1580e2d7e17f45d19d3b12098e352f3a37fe86d380bf45846ef257054b242 \ - --hash=sha256:5ef9bc3d046ce83c4bbf4c25e1e0547b9c441c01d30922d812e887dc5f125c12 \ - 
--hash=sha256:5fa82a26f92871eca593b53359c12ad7949772462f887c35edaf36f87953c0e2 \ - --hash=sha256:61321672b3ac7aade25c40449ccedbc6db72c7f5f0fdf34def5e2f8b51ca530d \ - --hash=sha256:701171f825dcab90969596ce2af253143b93b08f1a716d4b2a9d2db5084ef7be \ - --hash=sha256:841ec8af7a8491ac76ec5a9522226e287187a3107e12b7d686ad354bb78facee \ - --hash=sha256:8a06641fb07d4e8f6c7dda4fc3f8871d327803ab6542e33831c7ccfdcb4d0ad6 \ - --hash=sha256:8e88bb9eafbf6a4014d55fb222e7360eef53e613215085e65a13290577394529 \ - --hash=sha256:a00aee5d1b6c20620161984f8ab2ab69134466c51f58c052c11b076715e72929 \ - --hash=sha256:a047682d324ba56e61b7ea7c7299d51e61fd3bca7dad2ccc39b72bd0118d60a1 \ - --hash=sha256:a7ef8dd0bf2e1d0a27042b231a3baac6883cdd5557036f5e8df7139255feaac6 \ - --hash=sha256:ad28cff53f60d99a928dfcf1e861e0b2ceb2bc1f08a074fdd601b314e1cc9e0a \ - --hash=sha256:b9097a208875fc7bbeb1286d0125d90bdfed961f61f214d3f5be62cd4ed8a446 \ - --hash=sha256:b97fe7d7991c25e6a31e5d5e795986b18fbbb3107b873d5f3ae6dc9a103278e9 \ - --hash=sha256:e0ec52ba3c7f1b7d813cd52649a5b3ef1fc0d433219dc8c93827c57eab6cf888 \ - --hash=sha256:ea2c3ffb662fec8bbbfce5602e2c159ff097a4631d96235fcf0fb00e59e3ece4 \ - --hash=sha256:fa3dec4ba8fb6e662770b74f62f1a0c7d4e37e25b58b2bf2c1be4c95372b4a33 \ - --hash=sha256:fbeb725c9dc799a574518109336acccaf1303c30d45c075c665c0793c2f79a7f +cryptography==42.0.4 \ + --hash=sha256:01911714117642a3f1792c7f376db572aadadbafcd8d75bb527166009c9f1d1b \ + --hash=sha256:0e89f7b84f421c56e7ff69f11c441ebda73b8a8e6488d322ef71746224c20fce \ + --hash=sha256:12d341bd42cdb7d4937b0cabbdf2a94f949413ac4504904d0cdbdce4a22cbf88 \ + --hash=sha256:15a1fb843c48b4a604663fa30af60818cd28f895572386e5f9b8a665874c26e7 \ + --hash=sha256:1cdcdbd117681c88d717437ada72bdd5be9de117f96e3f4d50dab3f59fd9ab20 \ + --hash=sha256:1df6fcbf60560d2113b5ed90f072dc0b108d64750d4cbd46a21ec882c7aefce9 \ + --hash=sha256:3c6048f217533d89f2f8f4f0fe3044bf0b2090453b7b73d0b77db47b80af8dff \ + 
--hash=sha256:3e970a2119507d0b104f0a8e281521ad28fc26f2820687b3436b8c9a5fcf20d1 \ + --hash=sha256:44a64043f743485925d3bcac548d05df0f9bb445c5fcca6681889c7c3ab12764 \ + --hash=sha256:4e36685cb634af55e0677d435d425043967ac2f3790ec652b2b88ad03b85c27b \ + --hash=sha256:5f8907fcf57392cd917892ae83708761c6ff3c37a8e835d7246ff0ad251d9298 \ + --hash=sha256:69b22ab6506a3fe483d67d1ed878e1602bdd5912a134e6202c1ec672233241c1 \ + --hash=sha256:6bfadd884e7280df24d26f2186e4e07556a05d37393b0f220a840b083dc6a824 \ + --hash=sha256:6d0fbe73728c44ca3a241eff9aefe6496ab2656d6e7a4ea2459865f2e8613257 \ + --hash=sha256:6ffb03d419edcab93b4b19c22ee80c007fb2d708429cecebf1dd3258956a563a \ + --hash=sha256:810bcf151caefc03e51a3d61e53335cd5c7316c0a105cc695f0959f2c638b129 \ + --hash=sha256:831a4b37accef30cccd34fcb916a5d7b5be3cbbe27268a02832c3e450aea39cb \ + --hash=sha256:887623fe0d70f48ab3f5e4dbf234986b1329a64c066d719432d0698522749929 \ + --hash=sha256:a0298bdc6e98ca21382afe914c642620370ce0470a01e1bef6dd9b5354c36854 \ + --hash=sha256:a1327f280c824ff7885bdeef8578f74690e9079267c1c8bd7dc5cc5aa065ae52 \ + --hash=sha256:c1f25b252d2c87088abc8bbc4f1ecbf7c919e05508a7e8628e6875c40bc70923 \ + --hash=sha256:c3a5cbc620e1e17009f30dd34cb0d85c987afd21c41a74352d1719be33380885 \ + --hash=sha256:ce8613beaffc7c14f091497346ef117c1798c202b01153a8cc7b8e2ebaaf41c0 \ + --hash=sha256:d2a27aca5597c8a71abbe10209184e1a8e91c1fd470b5070a2ea60cafec35bcd \ + --hash=sha256:dad9c385ba8ee025bb0d856714f71d7840020fe176ae0229de618f14dae7a6e2 \ + --hash=sha256:db4b65b02f59035037fde0998974d84244a64c3265bdef32a827ab9b63d61b18 \ + --hash=sha256:e09469a2cec88fb7b078e16d4adec594414397e8879a4341c6ace96013463d5b \ + --hash=sha256:e53dc41cda40b248ebc40b83b31516487f7db95ab8ceac1f042626bc43a2f992 \ + --hash=sha256:f1e85a178384bf19e36779d91ff35c7617c885da487d689b05c1366f9933ad74 \ + --hash=sha256:f47be41843200f7faec0683ad751e5ef11b9a56a220d57f300376cd8aba81660 \ + --hash=sha256:fb0cef872d8193e487fc6bdb08559c3aa41b659a7d9be48b2e10747f47863925 \ + 
--hash=sha256:ffc73996c4fca3d2b6c1c8c12bfd3ad00def8621da24f547626bf06441400449 # via # gcp-releasetool # secretstorage From ccb64e243566c91df80598126e119f3d6027cf0f Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 13 Mar 2024 19:12:59 +0100 Subject: [PATCH 148/210] chore(deps): update dependency pytest to v8.1.1 (#251) * chore(deps): update dependency pytest to v8.1.1 * pin pytest==7.4.4 for python 3.7 --------- Co-authored-by: Lingqing Gan --- packages/db-dtypes/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/db-dtypes/samples/snippets/requirements-test.txt b/packages/db-dtypes/samples/snippets/requirements-test.txt index 4f8692e74482..c08dde0c8aaa 100644 --- a/packages/db-dtypes/samples/snippets/requirements-test.txt +++ b/packages/db-dtypes/samples/snippets/requirements-test.txt @@ -1,2 +1,2 @@ pytest==7.4.4; python_version == '3.7' -pytest==8.0.2; python_version > '3.7' +pytest==8.1.1; python_version > '3.7' From 512182280694d97493d6ed579252c25d927b4264 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 15 Mar 2024 09:55:50 -0700 Subject: [PATCH 149/210] chore(python): add requirements for docs build (#254) Source-Link: https://github.com/googleapis/synthtool/commit/85c23b6bc4352c1b0674848eaeb4e48645aeda6b Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:3741fd1f5f5150378563c76afa06bcc12777b5fe54c5ee01115218f83872134f Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 4 +- packages/db-dtypes/.kokoro/build.sh | 7 ---- .../db-dtypes/.kokoro/docker/docs/Dockerfile | 4 ++ .../.kokoro/docker/docs/requirements.in | 1 + .../.kokoro/docker/docs/requirements.txt | 38 +++++++++++++++++++ 5 files changed, 45 insertions(+), 9 deletions(-) create mode 100644 packages/db-dtypes/.kokoro/docker/docs/requirements.in create mode 100644 packages/db-dtypes/.kokoro/docker/docs/requirements.txt 
diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index e4e943e0259a..5d9542b1cb21 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:98f3afd11308259de6e828e37376d18867fd321aba07826e29e4f8d9cab56bad -# created: 2024-02-27T15:56:18.442440378Z + digest: sha256:3741fd1f5f5150378563c76afa06bcc12777b5fe54c5ee01115218f83872134f +# created: 2024-03-15T16:26:15.743347415Z diff --git a/packages/db-dtypes/.kokoro/build.sh b/packages/db-dtypes/.kokoro/build.sh index 41c765dea85c..da9bfe26f62f 100755 --- a/packages/db-dtypes/.kokoro/build.sh +++ b/packages/db-dtypes/.kokoro/build.sh @@ -33,13 +33,6 @@ export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json # Setup project id. export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") -# Remove old nox -python3 -m pip uninstall --yes --quiet nox-automation - -# Install nox -python3 -m pip install --upgrade --quiet nox -python3 -m nox --version - # If this is a continuous build, send the test log to the FlakyBot. # See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. 
if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then diff --git a/packages/db-dtypes/.kokoro/docker/docs/Dockerfile b/packages/db-dtypes/.kokoro/docker/docs/Dockerfile index 8e39a2cc438d..bdaf39fe22d0 100644 --- a/packages/db-dtypes/.kokoro/docker/docs/Dockerfile +++ b/packages/db-dtypes/.kokoro/docker/docs/Dockerfile @@ -80,4 +80,8 @@ RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ # Test pip RUN python3 -m pip +# Install build requirements +COPY requirements.txt /requirements.txt +RUN python3 -m pip install --require-hashes -r requirements.txt + CMD ["python3.8"] diff --git a/packages/db-dtypes/.kokoro/docker/docs/requirements.in b/packages/db-dtypes/.kokoro/docker/docs/requirements.in new file mode 100644 index 000000000000..816817c672a1 --- /dev/null +++ b/packages/db-dtypes/.kokoro/docker/docs/requirements.in @@ -0,0 +1 @@ +nox diff --git a/packages/db-dtypes/.kokoro/docker/docs/requirements.txt b/packages/db-dtypes/.kokoro/docker/docs/requirements.txt new file mode 100644 index 000000000000..0e5d70f20f83 --- /dev/null +++ b/packages/db-dtypes/.kokoro/docker/docs/requirements.txt @@ -0,0 +1,38 @@ +# +# This file is autogenerated by pip-compile with Python 3.9 +# by the following command: +# +# pip-compile --allow-unsafe --generate-hashes requirements.in +# +argcomplete==3.2.3 \ + --hash=sha256:bf7900329262e481be5a15f56f19736b376df6f82ed27576fa893652c5de6c23 \ + --hash=sha256:c12355e0494c76a2a7b73e3a59b09024ca0ba1e279fb9ed6c1b82d5b74b6a70c + # via nox +colorlog==6.8.2 \ + --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ + --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33 + # via nox +distlib==0.3.8 \ + --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ + --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 + # via virtualenv +filelock==3.13.1 \ + 
--hash=sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e \ + --hash=sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c + # via virtualenv +nox==2024.3.2 \ + --hash=sha256:e53514173ac0b98dd47585096a55572fe504fecede58ced708979184d05440be \ + --hash=sha256:f521ae08a15adbf5e11f16cb34e8d0e6ea521e0b92868f684e91677deb974553 + # via -r requirements.in +packaging==24.0 \ + --hash=sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5 \ + --hash=sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9 + # via nox +platformdirs==4.2.0 \ + --hash=sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068 \ + --hash=sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768 + # via virtualenv +virtualenv==20.25.1 \ + --hash=sha256:961c026ac520bac5f69acb8ea063e8a4f071bcc9457b9c1f28f6b085c511583a \ + --hash=sha256:e08e13ecdca7a0bd53798f356d5831434afa5b07b93f0abdf0797b7a06ffe197 + # via nox From dc51f07f5a1fbdc2552f7e63c8c5f927e4f0ee0b Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 15 Mar 2024 17:00:20 +0000 Subject: [PATCH 150/210] chore(python): update dependencies in /.kokoro (#253) Source-Link: https://github.com/googleapis/synthtool/commit/db94845da69ccdfefd7ce55c84e6cfa74829747e Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:a8a80fc6456e433df53fc2a0d72ca0345db0ddefb409f1b75b118dfd1babd952 Co-authored-by: Owl Bot Co-authored-by: Lingqing Gan --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 4 +- packages/db-dtypes/.kokoro/requirements.in | 3 +- packages/db-dtypes/.kokoro/requirements.txt | 114 +++++++++---------- 3 files changed, 56 insertions(+), 65 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 5d9542b1cb21..4bdeef3904e2 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ 
b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:3741fd1f5f5150378563c76afa06bcc12777b5fe54c5ee01115218f83872134f -# created: 2024-03-15T16:26:15.743347415Z + digest: sha256:a8a80fc6456e433df53fc2a0d72ca0345db0ddefb409f1b75b118dfd1babd952 +# created: 2024-03-15T16:25:47.905264637Z diff --git a/packages/db-dtypes/.kokoro/requirements.in b/packages/db-dtypes/.kokoro/requirements.in index ec867d9fd65a..fff4d9ce0d0a 100644 --- a/packages/db-dtypes/.kokoro/requirements.in +++ b/packages/db-dtypes/.kokoro/requirements.in @@ -1,5 +1,5 @@ gcp-docuploader -gcp-releasetool>=1.10.5 # required for compatibility with cryptography>=39.x +gcp-releasetool>=2 # required for compatibility with cryptography>=42.x importlib-metadata typing-extensions twine @@ -8,3 +8,4 @@ setuptools nox>=2022.11.21 # required to remove dependency on py charset-normalizer<3 click<8.1.0 +cryptography>=42.0.5 diff --git a/packages/db-dtypes/.kokoro/requirements.txt b/packages/db-dtypes/.kokoro/requirements.txt index bda8e38c4f31..dd61f5f32018 100644 --- a/packages/db-dtypes/.kokoro/requirements.txt +++ b/packages/db-dtypes/.kokoro/requirements.txt @@ -93,40 +93,41 @@ colorlog==6.7.0 \ # via # gcp-docuploader # nox -cryptography==42.0.4 \ - --hash=sha256:01911714117642a3f1792c7f376db572aadadbafcd8d75bb527166009c9f1d1b \ - --hash=sha256:0e89f7b84f421c56e7ff69f11c441ebda73b8a8e6488d322ef71746224c20fce \ - --hash=sha256:12d341bd42cdb7d4937b0cabbdf2a94f949413ac4504904d0cdbdce4a22cbf88 \ - --hash=sha256:15a1fb843c48b4a604663fa30af60818cd28f895572386e5f9b8a665874c26e7 \ - --hash=sha256:1cdcdbd117681c88d717437ada72bdd5be9de117f96e3f4d50dab3f59fd9ab20 \ - --hash=sha256:1df6fcbf60560d2113b5ed90f072dc0b108d64750d4cbd46a21ec882c7aefce9 \ - --hash=sha256:3c6048f217533d89f2f8f4f0fe3044bf0b2090453b7b73d0b77db47b80af8dff \ - 
--hash=sha256:3e970a2119507d0b104f0a8e281521ad28fc26f2820687b3436b8c9a5fcf20d1 \ - --hash=sha256:44a64043f743485925d3bcac548d05df0f9bb445c5fcca6681889c7c3ab12764 \ - --hash=sha256:4e36685cb634af55e0677d435d425043967ac2f3790ec652b2b88ad03b85c27b \ - --hash=sha256:5f8907fcf57392cd917892ae83708761c6ff3c37a8e835d7246ff0ad251d9298 \ - --hash=sha256:69b22ab6506a3fe483d67d1ed878e1602bdd5912a134e6202c1ec672233241c1 \ - --hash=sha256:6bfadd884e7280df24d26f2186e4e07556a05d37393b0f220a840b083dc6a824 \ - --hash=sha256:6d0fbe73728c44ca3a241eff9aefe6496ab2656d6e7a4ea2459865f2e8613257 \ - --hash=sha256:6ffb03d419edcab93b4b19c22ee80c007fb2d708429cecebf1dd3258956a563a \ - --hash=sha256:810bcf151caefc03e51a3d61e53335cd5c7316c0a105cc695f0959f2c638b129 \ - --hash=sha256:831a4b37accef30cccd34fcb916a5d7b5be3cbbe27268a02832c3e450aea39cb \ - --hash=sha256:887623fe0d70f48ab3f5e4dbf234986b1329a64c066d719432d0698522749929 \ - --hash=sha256:a0298bdc6e98ca21382afe914c642620370ce0470a01e1bef6dd9b5354c36854 \ - --hash=sha256:a1327f280c824ff7885bdeef8578f74690e9079267c1c8bd7dc5cc5aa065ae52 \ - --hash=sha256:c1f25b252d2c87088abc8bbc4f1ecbf7c919e05508a7e8628e6875c40bc70923 \ - --hash=sha256:c3a5cbc620e1e17009f30dd34cb0d85c987afd21c41a74352d1719be33380885 \ - --hash=sha256:ce8613beaffc7c14f091497346ef117c1798c202b01153a8cc7b8e2ebaaf41c0 \ - --hash=sha256:d2a27aca5597c8a71abbe10209184e1a8e91c1fd470b5070a2ea60cafec35bcd \ - --hash=sha256:dad9c385ba8ee025bb0d856714f71d7840020fe176ae0229de618f14dae7a6e2 \ - --hash=sha256:db4b65b02f59035037fde0998974d84244a64c3265bdef32a827ab9b63d61b18 \ - --hash=sha256:e09469a2cec88fb7b078e16d4adec594414397e8879a4341c6ace96013463d5b \ - --hash=sha256:e53dc41cda40b248ebc40b83b31516487f7db95ab8ceac1f042626bc43a2f992 \ - --hash=sha256:f1e85a178384bf19e36779d91ff35c7617c885da487d689b05c1366f9933ad74 \ - --hash=sha256:f47be41843200f7faec0683ad751e5ef11b9a56a220d57f300376cd8aba81660 \ - --hash=sha256:fb0cef872d8193e487fc6bdb08559c3aa41b659a7d9be48b2e10747f47863925 \ - 
--hash=sha256:ffc73996c4fca3d2b6c1c8c12bfd3ad00def8621da24f547626bf06441400449 +cryptography==42.0.5 \ + --hash=sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee \ + --hash=sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576 \ + --hash=sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d \ + --hash=sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30 \ + --hash=sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413 \ + --hash=sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb \ + --hash=sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da \ + --hash=sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4 \ + --hash=sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd \ + --hash=sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc \ + --hash=sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8 \ + --hash=sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1 \ + --hash=sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc \ + --hash=sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e \ + --hash=sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8 \ + --hash=sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940 \ + --hash=sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400 \ + --hash=sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7 \ + --hash=sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16 \ + --hash=sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278 \ + --hash=sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74 \ + --hash=sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec \ + 
--hash=sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1 \ + --hash=sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2 \ + --hash=sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c \ + --hash=sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922 \ + --hash=sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a \ + --hash=sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6 \ + --hash=sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1 \ + --hash=sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e \ + --hash=sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac \ + --hash=sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7 # via + # -r requirements.in # gcp-releasetool # secretstorage distlib==0.3.7 \ @@ -145,9 +146,9 @@ gcp-docuploader==0.6.5 \ --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea # via -r requirements.in -gcp-releasetool==1.16.0 \ - --hash=sha256:27bf19d2e87aaa884096ff941aa3c592c482be3d6a2bfe6f06afafa6af2353e3 \ - --hash=sha256:a316b197a543fd036209d0caba7a8eb4d236d8e65381c80cbc6d7efaa7606d63 +gcp-releasetool==2.0.0 \ + --hash=sha256:3d73480b50ba243f22d7c7ec08b115a30e1c7817c4899781840c26f9c55b8277 \ + --hash=sha256:7aa9fd935ec61e581eb8458ad00823786d91756c25e492f372b2b30962f3c28f # via -r requirements.in google-api-core==2.12.0 \ --hash=sha256:c22e01b1e3c4dcd90998494879612c38d0a3411d1f7b679eb89e2abe3ce1f553 \ @@ -392,29 +393,18 @@ platformdirs==3.11.0 \ --hash=sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3 \ --hash=sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e # via virtualenv -protobuf==3.20.3 \ - --hash=sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7 \ - 
--hash=sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c \ - --hash=sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2 \ - --hash=sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b \ - --hash=sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050 \ - --hash=sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9 \ - --hash=sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7 \ - --hash=sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454 \ - --hash=sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480 \ - --hash=sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469 \ - --hash=sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c \ - --hash=sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e \ - --hash=sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db \ - --hash=sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905 \ - --hash=sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b \ - --hash=sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86 \ - --hash=sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4 \ - --hash=sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402 \ - --hash=sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7 \ - --hash=sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4 \ - --hash=sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99 \ - --hash=sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee +protobuf==4.25.3 \ + --hash=sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4 \ + --hash=sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8 \ + 
--hash=sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c \ + --hash=sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d \ + --hash=sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4 \ + --hash=sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa \ + --hash=sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c \ + --hash=sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019 \ + --hash=sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9 \ + --hash=sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c \ + --hash=sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2 # via # gcp-docuploader # gcp-releasetool @@ -518,7 +508,7 @@ zipp==3.17.0 \ # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==68.2.2 \ - --hash=sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87 \ - --hash=sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a +setuptools==69.2.0 \ + --hash=sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e \ + --hash=sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c # via -r requirements.in From 71dfb56faa316a0294e58ee254309595ca1d50b9 Mon Sep 17 00:00:00 2001 From: Lingqing Gan Date: Wed, 3 Apr 2024 06:38:09 -0700 Subject: [PATCH 151/210] testing: use arbitrary equality for pinned pytest version (#262) --- packages/db-dtypes/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/db-dtypes/samples/snippets/requirements-test.txt b/packages/db-dtypes/samples/snippets/requirements-test.txt index c08dde0c8aaa..e56028c28e66 100644 --- a/packages/db-dtypes/samples/snippets/requirements-test.txt +++ b/packages/db-dtypes/samples/snippets/requirements-test.txt @@ -1,2 +1,2 @@ -pytest==7.4.4; python_version == 
'3.7' +pytest===7.4.4; python_version == '3.7' # prevents dependabot from upgrading it pytest==8.1.1; python_version > '3.7' From 06d69eba8d611349275ff960034a9cc68cb69433 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 5 Apr 2024 13:26:34 -0700 Subject: [PATCH 152/210] docs: add summary_overview template (#264) Source-Link: https://github.com/googleapis/synthtool/commit/d7c2271d319aeb7e3043ec3f1ecec6f3604f1f1e Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:db05f70829de86fe8e34ba972b7fe56da57eaccf1691f875ed4867db80d5cec9 Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 4 ++-- packages/db-dtypes/.github/auto-label.yaml | 5 +++++ packages/db-dtypes/.github/blunderbuss.yml | 17 +++++++++++++++++ 3 files changed, 24 insertions(+), 2 deletions(-) create mode 100644 packages/db-dtypes/.github/blunderbuss.yml diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 4bdeef3904e2..3189719173b1 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:a8a80fc6456e433df53fc2a0d72ca0345db0ddefb409f1b75b118dfd1babd952 -# created: 2024-03-15T16:25:47.905264637Z + digest: sha256:db05f70829de86fe8e34ba972b7fe56da57eaccf1691f875ed4867db80d5cec9 +# created: 2024-04-05T19:51:26.466869535Z diff --git a/packages/db-dtypes/.github/auto-label.yaml b/packages/db-dtypes/.github/auto-label.yaml index b2016d119b40..8b37ee89711f 100644 --- a/packages/db-dtypes/.github/auto-label.yaml +++ b/packages/db-dtypes/.github/auto-label.yaml @@ -13,3 +13,8 @@ # limitations under the License. 
requestsize: enabled: true + +path: + pullrequest: true + paths: + samples: "samples" diff --git a/packages/db-dtypes/.github/blunderbuss.yml b/packages/db-dtypes/.github/blunderbuss.yml new file mode 100644 index 000000000000..5b7383dc7665 --- /dev/null +++ b/packages/db-dtypes/.github/blunderbuss.yml @@ -0,0 +1,17 @@ +# Blunderbuss config +# +# This file controls who is assigned for pull requests and issues. +# Note: This file is autogenerated. To make changes to the assignee +# team, please update `codeowner_team` in `.repo-metadata.json`. +assign_issues: + - googleapis/api-bigquery + +assign_issues_by: + - labels: + - "samples" + to: + - googleapis/python-samples-reviewers + - googleapis/api-bigquery + +assign_prs: + - googleapis/api-bigquery From 42bcb547374c6eb30bfaed782536665366ea75cc Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 12 Apr 2024 17:03:12 -0400 Subject: [PATCH 153/210] chore(python): bump idna from 3.4 to 3.7 in .kokoro (#269) Source-Link: https://github.com/googleapis/synthtool/commit/d50980e704793a2d3310bfb3664f3a82f24b5796 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:5a4c19d17e597b92d786e569be101e636c9c2817731f80a5adec56b2aa8fe070 Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 4 ++-- packages/db-dtypes/.kokoro/requirements.txt | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 3189719173b1..81f87c56917d 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:db05f70829de86fe8e34ba972b7fe56da57eaccf1691f875ed4867db80d5cec9 -# created: 2024-04-05T19:51:26.466869535Z + digest: sha256:5a4c19d17e597b92d786e569be101e636c9c2817731f80a5adec56b2aa8fe070 +# created: 2024-04-12T11:35:58.922854369Z diff --git a/packages/db-dtypes/.kokoro/requirements.txt b/packages/db-dtypes/.kokoro/requirements.txt index dd61f5f32018..51f92b8e12f1 100644 --- a/packages/db-dtypes/.kokoro/requirements.txt +++ b/packages/db-dtypes/.kokoro/requirements.txt @@ -252,9 +252,9 @@ googleapis-common-protos==1.61.0 \ --hash=sha256:22f1915393bb3245343f6efe87f6fe868532efc12aa26b391b15132e1279f1c0 \ --hash=sha256:8a64866a97f6304a7179873a465d6eee97b7a24ec6cfd78e0f575e96b821240b # via google-api-core -idna==3.4 \ - --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ - --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 +idna==3.7 \ + --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \ + --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0 # via requests importlib-metadata==6.8.0 \ --hash=sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb \ From 5ceeca6d2e4687511e10f5780a5a6fffb13b6ee1 Mon Sep 17 00:00:00 2001 From: Lingqing Gan Date: Mon, 15 Apr 2024 06:10:57 -0700 Subject: [PATCH 154/210] test: use np.nan for numpy >= 2.0.0 (#270) * fix: use np.nan for numpy >= 2.0.0 * merge unittest-prerelease.yml into unittest.yml * add unit-prerelease to cover's dependencies * fix coverage file upload --- .../.github/workflows/unittest-prerelease.yml | 32 ------------------- .../db-dtypes/.github/workflows/unittest.yml | 28 ++++++++++++++++ packages/db-dtypes/owlbot.py | 2 +- packages/db-dtypes/tests/unit/test_dtypes.py | 9 +++++- 4 files changed, 37 insertions(+), 34 deletions(-) delete mode 100644 
packages/db-dtypes/.github/workflows/unittest-prerelease.yml diff --git a/packages/db-dtypes/.github/workflows/unittest-prerelease.yml b/packages/db-dtypes/.github/workflows/unittest-prerelease.yml deleted file mode 100644 index 9d30c755af17..000000000000 --- a/packages/db-dtypes/.github/workflows/unittest-prerelease.yml +++ /dev/null @@ -1,32 +0,0 @@ -on: - pull_request: - branches: - - main -name: unittest-prerelease -jobs: - unit-prerelease: - runs-on: ubuntu-latest - strategy: - matrix: - python: ['3.12'] - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Setup Python - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python }} - - name: Install nox - run: | - python -m pip install --upgrade setuptools pip wheel - python -m pip install nox - - name: Run unit tests - env: - COVERAGE_FILE: .coverage-prerelease-${{ matrix.python }} - run: | - nox -s unit_prerelease - - name: Upload coverage results - uses: actions/upload-artifact@v4 - with: - name: coverage-artifacts - path: .coverage-${{ matrix.python }} diff --git a/packages/db-dtypes/.github/workflows/unittest.yml b/packages/db-dtypes/.github/workflows/unittest.yml index f4a337c496a0..89f021e695e5 100644 --- a/packages/db-dtypes/.github/workflows/unittest.yml +++ b/packages/db-dtypes/.github/workflows/unittest.yml @@ -31,10 +31,38 @@ jobs: name: coverage-artifact-${{ matrix.python }} path: .coverage-${{ matrix.python }} + unit-prerelease: + runs-on: ubuntu-latest + strategy: + matrix: + python: ['3.12'] + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python }} + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run unit tests + env: + COVERAGE_FILE: .coverage-prerelease-${{ matrix.python }} + run: | + nox -s unit_prerelease + - name: Upload coverage results + uses: actions/upload-artifact@v4 + with: + name: 
coverage-artifact-prerelease-${{ matrix.python }} + path: .coverage-prerelease-${{ matrix.python }} + cover: runs-on: ubuntu-latest needs: - unit + - unit-prerelease steps: - name: Checkout uses: actions/checkout@v4 diff --git a/packages/db-dtypes/owlbot.py b/packages/db-dtypes/owlbot.py index d1b3c088775f..2c0ea58068ce 100644 --- a/packages/db-dtypes/owlbot.py +++ b/packages/db-dtypes/owlbot.py @@ -34,7 +34,7 @@ "pandas": "https://pandas.pydata.org/pandas-docs/stable/" }, ) -s.move(templated_files, excludes=["docs/multiprocessing.rst", "README.rst"]) +s.move(templated_files, excludes=["docs/multiprocessing.rst", "README.rst", ".github/workflows/unittest.yml"]) # ---------------------------------------------------------------------------- # Fixup files diff --git a/packages/db-dtypes/tests/unit/test_dtypes.py b/packages/db-dtypes/tests/unit/test_dtypes.py index 6584cee7ee6a..f2c5593c8fec 100644 --- a/packages/db-dtypes/tests/unit/test_dtypes.py +++ b/packages/db-dtypes/tests/unit/test_dtypes.py @@ -314,7 +314,7 @@ def test__validate_scalar_invalid(dtype): (False, None), (True, None), (True, pd.NaT if pd else None), - (True, np.NaN if pd else None), + (True, "np.nan" if pd else None), (True, 42), ], ) @@ -328,6 +328,13 @@ def test_take(dtype, allow_fill, fill_value): if dtype == "dbdate" else datetime.time(0, 42, 42, 424242) ) + elif fill_value == "np.nan": + expected_fill = pd.NaT + try: + fill_value = np.NaN + except AttributeError: + # `np.NaN` was removed in NumPy 2.0 release. 
Use `np.nan` instead + fill_value = np.nan else: expected_fill = pd.NaT b = a.take([1, -1, 3], allow_fill=True, fill_value=fill_value) From ce7277887e50bea867acc55f821d1739690774b3 Mon Sep 17 00:00:00 2001 From: Chalmer Lowe Date: Thu, 27 Jun 2024 13:34:18 -0400 Subject: [PATCH 155/210] bug: refine constraint to avoid numpy pandas incompatability (#277) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Pins the version of `numpy` used with `python 3.9` to be less than `2.0.0`. There is a potentially unexpected interaction between `pandas` and `numpy` here. Depending on the version we use for each, we get different results in terms of pass/fail for the unit tests associated with `python 3.9` |numpy version|pandas version|unit-3.9 pass/fail| |-|-|-| |2.0.0|1.3.0|fail| |2.0.0|2.2.2|pass| |2.0.0|1.5.3|fail| |1.26.4|1.5.3|pass| |1.26.4|1.3.0|pass| This [appears to be a known incompatibility](https://togithub.com/numpy/numpy/issues/26710) between how `pip` resolves the dependency versions for `numpy` and `pandas` and the only current fix is to pin versions to something that "works" to avoid allowing `pip` to resolve into a failing state. (NOTE the linked issue references various versions of numpy and pandas, depending on the combination, inlcuding 3.9 throughout the thread, even though the title references `python 3.12`) Fixes #275 🦕 --- packages/db-dtypes/testing/constraints-3.9.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/db-dtypes/testing/constraints-3.9.txt b/packages/db-dtypes/testing/constraints-3.9.txt index d814dcd4924a..b9ab6bf3d530 100644 --- a/packages/db-dtypes/testing/constraints-3.9.txt +++ b/packages/db-dtypes/testing/constraints-3.9.txt @@ -1,2 +1,3 @@ # Make sure we test with pandas 1.3.0. The Python version isn't that relevant. 
pandas==1.3.0 +numpy<2.0.0 From f0c9fd91ebedd5d59505e56ace3653e4b6e92920 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 3 Jul 2024 13:03:04 -0700 Subject: [PATCH 156/210] chore: update templated files (#278) * chore: update templated files Source-Link: https://github.com/googleapis/synthtool/commit/a37f74cd300d1f56d6f28c368d2931f72adee948 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:d3de8a02819f65001effcbd3ea76ce97e9bcff035c7a89457f40f892c87c5b32 * exclude noxfile.py from templates --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- packages/db-dtypes/.coveragerc | 2 +- packages/db-dtypes/.flake8 | 2 +- packages/db-dtypes/.github/.OwlBot.lock.yaml | 4 +- packages/db-dtypes/.github/auto-label.yaml | 2 +- packages/db-dtypes/.kokoro/build.sh | 2 +- .../db-dtypes/.kokoro/docker/docs/Dockerfile | 2 +- .../db-dtypes/.kokoro/populate-secrets.sh | 2 +- packages/db-dtypes/.kokoro/publish-docs.sh | 2 +- packages/db-dtypes/.kokoro/release.sh | 2 +- packages/db-dtypes/.kokoro/requirements.txt | 509 +++++++++--------- .../.kokoro/test-samples-against-head.sh | 2 +- .../db-dtypes/.kokoro/test-samples-impl.sh | 2 +- packages/db-dtypes/.kokoro/test-samples.sh | 2 +- packages/db-dtypes/.kokoro/trampoline.sh | 2 +- packages/db-dtypes/.kokoro/trampoline_v2.sh | 2 +- packages/db-dtypes/.pre-commit-config.yaml | 2 +- packages/db-dtypes/.trampolinerc | 2 +- packages/db-dtypes/MANIFEST.in | 2 +- packages/db-dtypes/docs/conf.py | 2 +- packages/db-dtypes/owlbot.py | 156 +----- packages/db-dtypes/scripts/decrypt-secrets.sh | 2 +- .../scripts/readme-gen/readme_gen.py | 2 +- 22 files changed, 288 insertions(+), 419 deletions(-) diff --git a/packages/db-dtypes/.coveragerc b/packages/db-dtypes/.coveragerc index 24e6fe932bed..f70ea210f21f 100644 --- a/packages/db-dtypes/.coveragerc +++ b/packages/db-dtypes/.coveragerc @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 
2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/db-dtypes/.flake8 b/packages/db-dtypes/.flake8 index 87f6e408c47d..32986c79287a 100644 --- a/packages/db-dtypes/.flake8 +++ b/packages/db-dtypes/.flake8 @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 81f87c56917d..91d742b5b9fe 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:5a4c19d17e597b92d786e569be101e636c9c2817731f80a5adec56b2aa8fe070 -# created: 2024-04-12T11:35:58.922854369Z + digest: sha256:d3de8a02819f65001effcbd3ea76ce97e9bcff035c7a89457f40f892c87c5b32 +# created: 2024-07-03T17:43:00.77142528Z diff --git a/packages/db-dtypes/.github/auto-label.yaml b/packages/db-dtypes/.github/auto-label.yaml index 8b37ee89711f..21786a4eb085 100644 --- a/packages/db-dtypes/.github/auto-label.yaml +++ b/packages/db-dtypes/.github/auto-label.yaml @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/db-dtypes/.kokoro/build.sh b/packages/db-dtypes/.kokoro/build.sh index da9bfe26f62f..2731d6f2a78d 100755 --- a/packages/db-dtypes/.kokoro/build.sh +++ b/packages/db-dtypes/.kokoro/build.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/db-dtypes/.kokoro/docker/docs/Dockerfile b/packages/db-dtypes/.kokoro/docker/docs/Dockerfile index bdaf39fe22d0..a26ce61930f5 100644 --- a/packages/db-dtypes/.kokoro/docker/docs/Dockerfile +++ b/packages/db-dtypes/.kokoro/docker/docs/Dockerfile @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/db-dtypes/.kokoro/populate-secrets.sh b/packages/db-dtypes/.kokoro/populate-secrets.sh index 6f3972140e80..c435402f473e 100755 --- a/packages/db-dtypes/.kokoro/populate-secrets.sh +++ b/packages/db-dtypes/.kokoro/populate-secrets.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC. +# Copyright 2024 Google LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/db-dtypes/.kokoro/publish-docs.sh b/packages/db-dtypes/.kokoro/publish-docs.sh index 9eafe0be3bba..38f083f05aa0 100755 --- a/packages/db-dtypes/.kokoro/publish-docs.sh +++ b/packages/db-dtypes/.kokoro/publish-docs.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/db-dtypes/.kokoro/release.sh b/packages/db-dtypes/.kokoro/release.sh index 84a42d909c0c..7846e14cf211 100755 --- a/packages/db-dtypes/.kokoro/release.sh +++ b/packages/db-dtypes/.kokoro/release.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/db-dtypes/.kokoro/requirements.txt b/packages/db-dtypes/.kokoro/requirements.txt index 51f92b8e12f1..35ece0e4d2e9 100644 --- a/packages/db-dtypes/.kokoro/requirements.txt +++ b/packages/db-dtypes/.kokoro/requirements.txt @@ -4,21 +4,25 @@ # # pip-compile --allow-unsafe --generate-hashes requirements.in # -argcomplete==3.1.4 \ - --hash=sha256:72558ba729e4c468572609817226fb0a6e7e9a0a7d477b882be168c0b4a62b94 \ - --hash=sha256:fbe56f8cda08aa9a04b307d8482ea703e96a6a801611acb4be9bf3942017989f +argcomplete==3.4.0 \ + --hash=sha256:69a79e083a716173e5532e0fa3bef45f793f4e61096cf52b5a42c0211c8b8aa5 \ + --hash=sha256:c2abcdfe1be8ace47ba777d4fce319eb13bf8ad9dace8d085dcad6eded88057f # via nox -attrs==23.1.0 \ - --hash=sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04 \ - --hash=sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015 +attrs==23.2.0 \ + --hash=sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30 \ + --hash=sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1 # via gcp-releasetool -cachetools==5.3.2 \ - --hash=sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2 \ - --hash=sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1 +backports-tarfile==1.2.0 \ + --hash=sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34 \ + --hash=sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991 + # via jaraco-context +cachetools==5.3.3 \ + 
--hash=sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945 \ + --hash=sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105 # via google-auth -certifi==2023.7.22 \ - --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \ - --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9 +certifi==2024.6.2 \ + --hash=sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516 \ + --hash=sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56 # via requests cffi==1.16.0 \ --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \ @@ -87,90 +91,90 @@ click==8.0.4 \ # -r requirements.in # gcp-docuploader # gcp-releasetool -colorlog==6.7.0 \ - --hash=sha256:0d33ca236784a1ba3ff9c532d4964126d8a2c44f1f0cb1d2b0728196f512f662 \ - --hash=sha256:bd94bd21c1e13fac7bd3153f4bc3a7dc0eb0974b8bc2fdf1a989e474f6e582e5 +colorlog==6.8.2 \ + --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ + --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33 # via # gcp-docuploader # nox -cryptography==42.0.5 \ - --hash=sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee \ - --hash=sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576 \ - --hash=sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d \ - --hash=sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30 \ - --hash=sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413 \ - --hash=sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb \ - --hash=sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da \ - --hash=sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4 \ - --hash=sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd \ - 
--hash=sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc \ - --hash=sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8 \ - --hash=sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1 \ - --hash=sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc \ - --hash=sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e \ - --hash=sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8 \ - --hash=sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940 \ - --hash=sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400 \ - --hash=sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7 \ - --hash=sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16 \ - --hash=sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278 \ - --hash=sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74 \ - --hash=sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec \ - --hash=sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1 \ - --hash=sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2 \ - --hash=sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c \ - --hash=sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922 \ - --hash=sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a \ - --hash=sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6 \ - --hash=sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1 \ - --hash=sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e \ - --hash=sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac \ - --hash=sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7 +cryptography==42.0.8 \ + 
--hash=sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad \ + --hash=sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583 \ + --hash=sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b \ + --hash=sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c \ + --hash=sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1 \ + --hash=sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648 \ + --hash=sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949 \ + --hash=sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba \ + --hash=sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c \ + --hash=sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9 \ + --hash=sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d \ + --hash=sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c \ + --hash=sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e \ + --hash=sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2 \ + --hash=sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d \ + --hash=sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7 \ + --hash=sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70 \ + --hash=sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2 \ + --hash=sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7 \ + --hash=sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14 \ + --hash=sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe \ + --hash=sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e \ + --hash=sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71 \ + --hash=sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961 \ + 
--hash=sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7 \ + --hash=sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c \ + --hash=sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28 \ + --hash=sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842 \ + --hash=sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902 \ + --hash=sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801 \ + --hash=sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a \ + --hash=sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e # via # -r requirements.in # gcp-releasetool # secretstorage -distlib==0.3.7 \ - --hash=sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057 \ - --hash=sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8 +distlib==0.3.8 \ + --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ + --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 # via virtualenv -docutils==0.20.1 \ - --hash=sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6 \ - --hash=sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b +docutils==0.21.2 \ + --hash=sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f \ + --hash=sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2 # via readme-renderer -filelock==3.13.1 \ - --hash=sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e \ - --hash=sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c +filelock==3.15.4 \ + --hash=sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb \ + --hash=sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7 # via virtualenv gcp-docuploader==0.6.5 \ --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ 
--hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea # via -r requirements.in -gcp-releasetool==2.0.0 \ - --hash=sha256:3d73480b50ba243f22d7c7ec08b115a30e1c7817c4899781840c26f9c55b8277 \ - --hash=sha256:7aa9fd935ec61e581eb8458ad00823786d91756c25e492f372b2b30962f3c28f +gcp-releasetool==2.0.1 \ + --hash=sha256:34314a910c08e8911d9c965bd44f8f2185c4f556e737d719c33a41f6a610de96 \ + --hash=sha256:b0d5863c6a070702b10883d37c4bdfd74bf930fe417f36c0c965d3b7c779ae62 # via -r requirements.in -google-api-core==2.12.0 \ - --hash=sha256:c22e01b1e3c4dcd90998494879612c38d0a3411d1f7b679eb89e2abe3ce1f553 \ - --hash=sha256:ec6054f7d64ad13b41e43d96f735acbd763b0f3b695dabaa2d579673f6a6e160 +google-api-core==2.19.1 \ + --hash=sha256:f12a9b8309b5e21d92483bbd47ce2c445861ec7d269ef6784ecc0ea8c1fa6125 \ + --hash=sha256:f4695f1e3650b316a795108a76a1c416e6afb036199d1c1f1f110916df479ffd # via # google-cloud-core # google-cloud-storage -google-auth==2.23.4 \ - --hash=sha256:79905d6b1652187def79d491d6e23d0cbb3a21d3c7ba0dbaa9c8a01906b13ff3 \ - --hash=sha256:d4bbc92fe4b8bfd2f3e8d88e5ba7085935da208ee38a134fc280e7ce682a05f2 +google-auth==2.31.0 \ + --hash=sha256:042c4702efa9f7d3c48d3a69341c209381b125faa6dbf3ebe56bc7e40ae05c23 \ + --hash=sha256:87805c36970047247c8afe614d4e3af8eceafc1ebba0c679fe75ddd1d575e871 # via # gcp-releasetool # google-api-core # google-cloud-core # google-cloud-storage -google-cloud-core==2.3.3 \ - --hash=sha256:37b80273c8d7eee1ae816b3a20ae43585ea50506cb0e60f3cf5be5f87f1373cb \ - --hash=sha256:fbd11cad3e98a7e5b0343dc07cb1039a5ffd7a5bb96e1f1e27cee4bda4a90863 +google-cloud-core==2.4.1 \ + --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \ + --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61 # via google-cloud-storage -google-cloud-storage==2.13.0 \ - --hash=sha256:ab0bf2e1780a1b74cf17fccb13788070b729f50c252f0c94ada2aae0ca95437d \ - 
--hash=sha256:f62dc4c7b6cd4360d072e3deb28035fbdad491ac3d9b0b1815a12daea10f37c7 +google-cloud-storage==2.17.0 \ + --hash=sha256:49378abff54ef656b52dca5ef0f2eba9aa83dc2b2c72c78714b03a1a95fe9388 \ + --hash=sha256:5b393bc766b7a3bc6f5407b9e665b2450d36282614b7945e570b3480a456d1e1 # via gcp-docuploader google-crc32c==1.5.0 \ --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ @@ -244,28 +248,36 @@ google-crc32c==1.5.0 \ # via # google-cloud-storage # google-resumable-media -google-resumable-media==2.6.0 \ - --hash=sha256:972852f6c65f933e15a4a210c2b96930763b47197cdf4aa5f5bea435efb626e7 \ - --hash=sha256:fc03d344381970f79eebb632a3c18bb1828593a2dc5572b5f90115ef7d11e81b +google-resumable-media==2.7.1 \ + --hash=sha256:103ebc4ba331ab1bfdac0250f8033627a2cd7cde09e7ccff9181e31ba4315b2c \ + --hash=sha256:eae451a7b2e2cdbaaa0fd2eb00cc8a1ee5e95e16b55597359cbc3d27d7d90e33 # via google-cloud-storage -googleapis-common-protos==1.61.0 \ - --hash=sha256:22f1915393bb3245343f6efe87f6fe868532efc12aa26b391b15132e1279f1c0 \ - --hash=sha256:8a64866a97f6304a7179873a465d6eee97b7a24ec6cfd78e0f575e96b821240b +googleapis-common-protos==1.63.2 \ + --hash=sha256:27a2499c7e8aff199665b22741997e485eccc8645aa9176c7c988e6fae507945 \ + --hash=sha256:27c5abdffc4911f28101e635de1533fb4cfd2c37fbaa9174587c799fac90aa87 # via google-api-core idna==3.7 \ --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \ --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0 # via requests -importlib-metadata==6.8.0 \ - --hash=sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb \ - --hash=sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743 +importlib-metadata==8.0.0 \ + --hash=sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f \ + --hash=sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812 # via # -r requirements.in # keyring # twine -jaraco-classes==3.3.0 \ - 
--hash=sha256:10afa92b6743f25c0cf5f37c6bb6e18e2c5bb84a16527ccfc0040ea377e7aaeb \ - --hash=sha256:c063dd08e89217cee02c8d5e5ec560f2c8ce6cdc2fcdc2e68f7b2e5547ed3621 +jaraco-classes==3.4.0 \ + --hash=sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd \ + --hash=sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790 + # via keyring +jaraco-context==5.3.0 \ + --hash=sha256:3e16388f7da43d384a1a7cd3452e72e14732ac9fe459678773a3608a812bf266 \ + --hash=sha256:c2f67165ce1f9be20f32f650f25d8edfc1646a8aeee48ae06fb35f90763576d2 + # via keyring +jaraco-functools==4.0.1 \ + --hash=sha256:3b24ccb921d6b593bdceb56ce14799204f473976e2a9d4b15b04d0f2c2326664 \ + --hash=sha256:d33fa765374c0611b52f8b3a795f8900869aa88c84769d4d1746cd68fb28c3e8 # via keyring jeepney==0.8.0 \ --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ @@ -273,13 +285,13 @@ jeepney==0.8.0 \ # via # keyring # secretstorage -jinja2==3.1.3 \ - --hash=sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa \ - --hash=sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90 +jinja2==3.1.4 \ + --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \ + --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d # via gcp-releasetool -keyring==24.2.0 \ - --hash=sha256:4901caaf597bfd3bbd78c9a0c7c4c29fcd8310dab2cffefe749e916b6527acd6 \ - --hash=sha256:ca0746a19ec421219f4d713f848fa297a661a8a8c1504867e55bfb5e09091509 +keyring==25.2.1 \ + --hash=sha256:2458681cdefc0dbc0b7eb6cf75d0b98e59f9ad9b2d4edd319d18f68bdca95e50 \ + --hash=sha256:daaffd42dbda25ddafb1ad5fec4024e5bbcfe424597ca1ca452b299861e49f1b # via # gcp-releasetool # twine @@ -287,146 +299,153 @@ markdown-it-py==3.0.0 \ --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \ --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb # via rich -markupsafe==2.1.3 \ - 
--hash=sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e \ - --hash=sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e \ - --hash=sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431 \ - --hash=sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686 \ - --hash=sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c \ - --hash=sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559 \ - --hash=sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc \ - --hash=sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb \ - --hash=sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939 \ - --hash=sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c \ - --hash=sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0 \ - --hash=sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4 \ - --hash=sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9 \ - --hash=sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575 \ - --hash=sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba \ - --hash=sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d \ - --hash=sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd \ - --hash=sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3 \ - --hash=sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00 \ - --hash=sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155 \ - --hash=sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac \ - --hash=sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52 \ - --hash=sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f \ - --hash=sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8 \ - 
--hash=sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b \ - --hash=sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007 \ - --hash=sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24 \ - --hash=sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea \ - --hash=sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198 \ - --hash=sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0 \ - --hash=sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee \ - --hash=sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be \ - --hash=sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2 \ - --hash=sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1 \ - --hash=sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707 \ - --hash=sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6 \ - --hash=sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c \ - --hash=sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58 \ - --hash=sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823 \ - --hash=sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779 \ - --hash=sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636 \ - --hash=sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c \ - --hash=sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad \ - --hash=sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee \ - --hash=sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc \ - --hash=sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2 \ - --hash=sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48 \ - --hash=sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7 \ - 
--hash=sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e \ - --hash=sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b \ - --hash=sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa \ - --hash=sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5 \ - --hash=sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e \ - --hash=sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb \ - --hash=sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9 \ - --hash=sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57 \ - --hash=sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc \ - --hash=sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc \ - --hash=sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2 \ - --hash=sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11 +markupsafe==2.1.5 \ + --hash=sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf \ + --hash=sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff \ + --hash=sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f \ + --hash=sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3 \ + --hash=sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532 \ + --hash=sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f \ + --hash=sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617 \ + --hash=sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df \ + --hash=sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4 \ + --hash=sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906 \ + --hash=sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f \ + 
--hash=sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4 \ + --hash=sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8 \ + --hash=sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371 \ + --hash=sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2 \ + --hash=sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465 \ + --hash=sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52 \ + --hash=sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6 \ + --hash=sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169 \ + --hash=sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad \ + --hash=sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2 \ + --hash=sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0 \ + --hash=sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029 \ + --hash=sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f \ + --hash=sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a \ + --hash=sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced \ + --hash=sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5 \ + --hash=sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c \ + --hash=sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf \ + --hash=sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9 \ + --hash=sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb \ + --hash=sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad \ + --hash=sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3 \ + --hash=sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1 \ + --hash=sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46 \ + 
--hash=sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc \ + --hash=sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a \ + --hash=sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee \ + --hash=sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900 \ + --hash=sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5 \ + --hash=sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea \ + --hash=sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f \ + --hash=sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5 \ + --hash=sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e \ + --hash=sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a \ + --hash=sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f \ + --hash=sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50 \ + --hash=sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a \ + --hash=sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b \ + --hash=sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4 \ + --hash=sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff \ + --hash=sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2 \ + --hash=sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46 \ + --hash=sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b \ + --hash=sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf \ + --hash=sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5 \ + --hash=sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5 \ + --hash=sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab \ + --hash=sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd \ + 
--hash=sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68 # via jinja2 mdurl==0.1.2 \ --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba # via markdown-it-py -more-itertools==10.1.0 \ - --hash=sha256:626c369fa0eb37bac0291bce8259b332fd59ac792fa5497b59837309cd5b114a \ - --hash=sha256:64e0735fcfdc6f3464ea133afe8ea4483b1c5fe3a3d69852e6503b43a0b222e6 - # via jaraco-classes -nh3==0.2.14 \ - --hash=sha256:116c9515937f94f0057ef50ebcbcc10600860065953ba56f14473ff706371873 \ - --hash=sha256:18415df36db9b001f71a42a3a5395db79cf23d556996090d293764436e98e8ad \ - --hash=sha256:203cac86e313cf6486704d0ec620a992c8bc164c86d3a4fd3d761dd552d839b5 \ - --hash=sha256:2b0be5c792bd43d0abef8ca39dd8acb3c0611052ce466d0401d51ea0d9aa7525 \ - --hash=sha256:377aaf6a9e7c63962f367158d808c6a1344e2b4f83d071c43fbd631b75c4f0b2 \ - --hash=sha256:525846c56c2bcd376f5eaee76063ebf33cf1e620c1498b2a40107f60cfc6054e \ - --hash=sha256:5529a3bf99402c34056576d80ae5547123f1078da76aa99e8ed79e44fa67282d \ - --hash=sha256:7771d43222b639a4cd9e341f870cee336b9d886de1ad9bec8dddab22fe1de450 \ - --hash=sha256:88c753efbcdfc2644a5012938c6b9753f1c64a5723a67f0301ca43e7b85dcf0e \ - --hash=sha256:93a943cfd3e33bd03f77b97baa11990148687877b74193bf777956b67054dcc6 \ - --hash=sha256:9be2f68fb9a40d8440cbf34cbf40758aa7f6093160bfc7fb018cce8e424f0c3a \ - --hash=sha256:a0c509894fd4dccdff557068e5074999ae3b75f4c5a2d6fb5415e782e25679c4 \ - --hash=sha256:ac8056e937f264995a82bf0053ca898a1cb1c9efc7cd68fa07fe0060734df7e4 \ - --hash=sha256:aed56a86daa43966dd790ba86d4b810b219f75b4bb737461b6886ce2bde38fd6 \ - --hash=sha256:e8986f1dd3221d1e741fda0a12eaa4a273f1d80a35e31a1ffe579e7c621d069e \ - --hash=sha256:f99212a81c62b5f22f9e7c3e347aa00491114a5647e1f13bbebd79c3e5f08d75 +more-itertools==10.3.0 \ + --hash=sha256:e5d93ef411224fbcef366a6e8ddc4c5781bc6359d43412a65dd5964e46111463 \ + 
--hash=sha256:ea6a02e24a9161e51faad17a8782b92a0df82c12c1c8886fec7f0c3fa1a1b320 + # via + # jaraco-classes + # jaraco-functools +nh3==0.2.17 \ + --hash=sha256:0316c25b76289cf23be6b66c77d3608a4fdf537b35426280032f432f14291b9a \ + --hash=sha256:1a814dd7bba1cb0aba5bcb9bebcc88fd801b63e21e2450ae6c52d3b3336bc911 \ + --hash=sha256:1aa52a7def528297f256de0844e8dd680ee279e79583c76d6fa73a978186ddfb \ + --hash=sha256:22c26e20acbb253a5bdd33d432a326d18508a910e4dcf9a3316179860d53345a \ + --hash=sha256:40015514022af31975c0b3bca4014634fa13cb5dc4dbcbc00570acc781316dcc \ + --hash=sha256:40d0741a19c3d645e54efba71cb0d8c475b59135c1e3c580f879ad5514cbf028 \ + --hash=sha256:551672fd71d06cd828e282abdb810d1be24e1abb7ae2543a8fa36a71c1006fe9 \ + --hash=sha256:66f17d78826096291bd264f260213d2b3905e3c7fae6dfc5337d49429f1dc9f3 \ + --hash=sha256:85cdbcca8ef10733bd31f931956f7fbb85145a4d11ab9e6742bbf44d88b7e351 \ + --hash=sha256:a3f55fabe29164ba6026b5ad5c3151c314d136fd67415a17660b4aaddacf1b10 \ + --hash=sha256:b4427ef0d2dfdec10b641ed0bdaf17957eb625b2ec0ea9329b3d28806c153d71 \ + --hash=sha256:ba73a2f8d3a1b966e9cdba7b211779ad8a2561d2dba9674b8a19ed817923f65f \ + --hash=sha256:c21bac1a7245cbd88c0b0e4a420221b7bfa838a2814ee5bb924e9c2f10a1120b \ + --hash=sha256:c551eb2a3876e8ff2ac63dff1585236ed5dfec5ffd82216a7a174f7c5082a78a \ + --hash=sha256:c790769152308421283679a142dbdb3d1c46c79c823008ecea8e8141db1a2062 \ + --hash=sha256:d7a25fd8c86657f5d9d576268e3b3767c5cd4f42867c9383618be8517f0f022a # via readme-renderer -nox==2023.4.22 \ - --hash=sha256:0b1adc619c58ab4fa57d6ab2e7823fe47a32e70202f287d78474adcc7bda1891 \ - --hash=sha256:46c0560b0dc609d7d967dc99e22cb463d3c4caf54a5fda735d6c11b5177e3a9f +nox==2024.4.15 \ + --hash=sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565 \ + --hash=sha256:ecf6700199cdfa9e5ea0a41ff5e6ef4641d09508eda6edb89d9987864115817f # via -r requirements.in -packaging==23.2 \ - --hash=sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5 \ - 
--hash=sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7 +packaging==24.1 \ + --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ + --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 # via # gcp-releasetool # nox -pkginfo==1.9.6 \ - --hash=sha256:4b7a555a6d5a22169fcc9cf7bfd78d296b0361adad412a346c1226849af5e546 \ - --hash=sha256:8fd5896e8718a4372f0ea9cc9d96f6417c9b986e23a4d116dda26b62cc29d046 +pkginfo==1.10.0 \ + --hash=sha256:5df73835398d10db79f8eecd5cd86b1f6d29317589ea70796994d49399af6297 \ + --hash=sha256:889a6da2ed7ffc58ab5b900d888ddce90bce912f2d2de1dc1c26f4cb9fe65097 # via twine -platformdirs==3.11.0 \ - --hash=sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3 \ - --hash=sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e +platformdirs==4.2.2 \ + --hash=sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee \ + --hash=sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3 # via virtualenv -protobuf==4.25.3 \ - --hash=sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4 \ - --hash=sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8 \ - --hash=sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c \ - --hash=sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d \ - --hash=sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4 \ - --hash=sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa \ - --hash=sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c \ - --hash=sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019 \ - --hash=sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9 \ - --hash=sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c \ - --hash=sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2 
+proto-plus==1.24.0 \ + --hash=sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445 \ + --hash=sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12 + # via google-api-core +protobuf==5.27.2 \ + --hash=sha256:0e341109c609749d501986b835f667c6e1e24531096cff9d34ae411595e26505 \ + --hash=sha256:176c12b1f1c880bf7a76d9f7c75822b6a2bc3db2d28baa4d300e8ce4cde7409b \ + --hash=sha256:354d84fac2b0d76062e9b3221f4abbbacdfd2a4d8af36bab0474f3a0bb30ab38 \ + --hash=sha256:4fadd8d83e1992eed0248bc50a4a6361dc31bcccc84388c54c86e530b7f58863 \ + --hash=sha256:54330f07e4949d09614707c48b06d1a22f8ffb5763c159efd5c0928326a91470 \ + --hash=sha256:610e700f02469c4a997e58e328cac6f305f649826853813177e6290416e846c6 \ + --hash=sha256:7fc3add9e6003e026da5fc9e59b131b8f22b428b991ccd53e2af8071687b4fce \ + --hash=sha256:9e8f199bf7f97bd7ecebffcae45ebf9527603549b2b562df0fbc6d4d688f14ca \ + --hash=sha256:a109916aaac42bff84702fb5187f3edadbc7c97fc2c99c5ff81dd15dcce0d1e5 \ + --hash=sha256:b848dbe1d57ed7c191dfc4ea64b8b004a3f9ece4bf4d0d80a367b76df20bf36e \ + --hash=sha256:f3ecdef226b9af856075f28227ff2c90ce3a594d092c39bee5513573f25e2714 # via # gcp-docuploader # gcp-releasetool # google-api-core # googleapis-common-protos -pyasn1==0.5.0 \ - --hash=sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57 \ - --hash=sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde + # proto-plus +pyasn1==0.6.0 \ + --hash=sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c \ + --hash=sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473 # via # pyasn1-modules # rsa -pyasn1-modules==0.3.0 \ - --hash=sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c \ - --hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d +pyasn1-modules==0.4.0 \ + --hash=sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6 \ + 
--hash=sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b # via google-auth -pycparser==2.21 \ - --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ - --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 +pycparser==2.22 \ + --hash=sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6 \ + --hash=sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc # via cffi -pygments==2.16.1 \ - --hash=sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692 \ - --hash=sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29 +pygments==2.18.0 \ + --hash=sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199 \ + --hash=sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a # via # readme-renderer # rich @@ -434,20 +453,20 @@ pyjwt==2.8.0 \ --hash=sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de \ --hash=sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320 # via gcp-releasetool -pyperclip==1.8.2 \ - --hash=sha256:105254a8b04934f0bc84e9c24eb360a591aaf6535c9def5f29d92af107a9bf57 +pyperclip==1.9.0 \ + --hash=sha256:b7de0142ddc81bfc5c7507eea19da920b92252b548b96186caf94a5e2527d310 # via gcp-releasetool -python-dateutil==2.8.2 \ - --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ - --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 +python-dateutil==2.9.0.post0 \ + --hash=sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3 \ + --hash=sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427 # via gcp-releasetool -readme-renderer==42.0 \ - --hash=sha256:13d039515c1f24de668e2c93f2e877b9dbe6c6c32328b90a40a49d8b2b85f36d \ - --hash=sha256:2d55489f83be4992fe4454939d1a051c33edbab778e82761d060c9fc6b308cd1 +readme-renderer==43.0 \ + 
--hash=sha256:1818dd28140813509eeed8d62687f7cd4f7bad90d4db586001c5dc09d4fde311 \ + --hash=sha256:19db308d86ecd60e5affa3b2a98f017af384678c63c88e5d4556a380e674f3f9 # via twine -requests==2.31.0 \ - --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \ - --hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1 +requests==2.32.3 \ + --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ + --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 # via # gcp-releasetool # google-api-core @@ -462,9 +481,9 @@ rfc3986==2.0.0 \ --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c # via twine -rich==13.6.0 \ - --hash=sha256:2b38e2fe9ca72c9a00170a1a2d20c63c790d0e10ef1fe35eba76e1e7b1d7d245 \ - --hash=sha256:5c14d22737e6d5084ef4771b62d5d4363165b403455a30a1c8ca39dc7b644bef +rich==13.7.1 \ + --hash=sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222 \ + --hash=sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432 # via twine rsa==4.9 \ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ @@ -480,35 +499,39 @@ six==1.16.0 \ # via # gcp-docuploader # python-dateutil -twine==4.0.2 \ - --hash=sha256:929bc3c280033347a00f847236564d1c52a3e61b1ac2516c97c48f3ceab756d8 \ - --hash=sha256:9e102ef5fdd5a20661eb88fad46338806c3bd32cf1db729603fe3697b1bc83c8 +tomli==2.0.1 \ + --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ + --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f + # via nox +twine==5.1.1 \ + --hash=sha256:215dbe7b4b94c2c50a7315c0275d2258399280fbb7d04182c7e55e24b5f93997 \ + --hash=sha256:9aa0825139c02b3434d913545c7b847a21c835e11597f5255842d457da2322db # via -r requirements.in -typing-extensions==4.8.0 \ - 
--hash=sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0 \ - --hash=sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef +typing-extensions==4.12.2 \ + --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ + --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 # via -r requirements.in -urllib3==2.0.7 \ - --hash=sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84 \ - --hash=sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e +urllib3==2.2.2 \ + --hash=sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472 \ + --hash=sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168 # via # requests # twine -virtualenv==20.24.6 \ - --hash=sha256:02ece4f56fbf939dbbc33c0715159951d6bf14aaf5457b092e4548e1382455af \ - --hash=sha256:520d056652454c5098a00c0f073611ccbea4c79089331f60bf9d7ba247bb7381 +virtualenv==20.26.3 \ + --hash=sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a \ + --hash=sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589 # via nox -wheel==0.41.3 \ - --hash=sha256:488609bc63a29322326e05560731bf7bfea8e48ad646e1f5e40d366607de0942 \ - --hash=sha256:4d4987ce51a49370ea65c0bfd2234e8ce80a12780820d9dc462597a6e60d0841 +wheel==0.43.0 \ + --hash=sha256:465ef92c69fa5c5da2d1cf8ac40559a8c940886afcef87dcf14b9470862f1d85 \ + --hash=sha256:55c570405f142630c6b9f72fe09d9b67cf1477fcf543ae5b8dcb1f5b7377da81 # via -r requirements.in -zipp==3.17.0 \ - --hash=sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31 \ - --hash=sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0 +zipp==3.19.2 \ + --hash=sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19 \ + --hash=sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c # via importlib-metadata # The following packages are considered to be unsafe in a requirements 
file: -setuptools==69.2.0 \ - --hash=sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e \ - --hash=sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c +setuptools==70.2.0 \ + --hash=sha256:b8b8060bb426838fbe942479c90296ce976249451118ef566a5a0b7d8b78fb05 \ + --hash=sha256:bd63e505105011b25c3c11f753f7e3b8465ea739efddaccef8f0efac2137bac1 # via -r requirements.in diff --git a/packages/db-dtypes/.kokoro/test-samples-against-head.sh b/packages/db-dtypes/.kokoro/test-samples-against-head.sh index 63ac41dfae1d..e9d8bd79a644 100755 --- a/packages/db-dtypes/.kokoro/test-samples-against-head.sh +++ b/packages/db-dtypes/.kokoro/test-samples-against-head.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/db-dtypes/.kokoro/test-samples-impl.sh b/packages/db-dtypes/.kokoro/test-samples-impl.sh index 5a0f5fab6a89..55910c8ba178 100755 --- a/packages/db-dtypes/.kokoro/test-samples-impl.sh +++ b/packages/db-dtypes/.kokoro/test-samples-impl.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/db-dtypes/.kokoro/test-samples.sh b/packages/db-dtypes/.kokoro/test-samples.sh index 50b35a48c190..7933d820149a 100755 --- a/packages/db-dtypes/.kokoro/test-samples.sh +++ b/packages/db-dtypes/.kokoro/test-samples.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/db-dtypes/.kokoro/trampoline.sh b/packages/db-dtypes/.kokoro/trampoline.sh index d85b1f267693..48f79699706e 100755 --- a/packages/db-dtypes/.kokoro/trampoline.sh +++ b/packages/db-dtypes/.kokoro/trampoline.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/db-dtypes/.kokoro/trampoline_v2.sh b/packages/db-dtypes/.kokoro/trampoline_v2.sh index 59a7cf3a9373..35fa529231dc 100755 --- a/packages/db-dtypes/.kokoro/trampoline_v2.sh +++ b/packages/db-dtypes/.kokoro/trampoline_v2.sh @@ -1,5 +1,5 @@ #!/usr/bin/env bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/db-dtypes/.pre-commit-config.yaml b/packages/db-dtypes/.pre-commit-config.yaml index 6a8e16950664..1d74695f70b6 100644 --- a/packages/db-dtypes/.pre-commit-config.yaml +++ b/packages/db-dtypes/.pre-commit-config.yaml @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/db-dtypes/.trampolinerc b/packages/db-dtypes/.trampolinerc index a7dfeb42c6d0..0080152373d5 100644 --- a/packages/db-dtypes/.trampolinerc +++ b/packages/db-dtypes/.trampolinerc @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/db-dtypes/MANIFEST.in b/packages/db-dtypes/MANIFEST.in index e0a66705318e..d6814cd60037 100644 --- a/packages/db-dtypes/MANIFEST.in +++ b/packages/db-dtypes/MANIFEST.in @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/db-dtypes/docs/conf.py b/packages/db-dtypes/docs/conf.py index ff2f01a39d07..00e001300b43 100644 --- a/packages/db-dtypes/docs/conf.py +++ b/packages/db-dtypes/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/db-dtypes/owlbot.py b/packages/db-dtypes/owlbot.py index 2c0ea58068ce..18bd6238073f 100644 --- a/packages/db-dtypes/owlbot.py +++ b/packages/db-dtypes/owlbot.py @@ -34,7 +34,7 @@ "pandas": "https://pandas.pydata.org/pandas-docs/stable/" }, ) -s.move(templated_files, excludes=["docs/multiprocessing.rst", "README.rst", ".github/workflows/unittest.yml"]) +s.move(templated_files, excludes=["docs/multiprocessing.rst", "README.rst", ".github/workflows/unittest.yml", "noxfile.py"]) # ---------------------------------------------------------------------------- # Fixup files @@ -44,160 +44,6 @@ [".coveragerc"], "google/cloud/__init__.py", "db_dtypes/requirements.py", ) -s.replace( - ["noxfile.py"], r"[\"']google[\"']", '"db_dtypes"', -) - -s.replace( - ["noxfile.py"], r"import shutil", "import re\nimport shutil", -) - -s.replace( - ["noxfile.py"], "--cov=google", "--cov=db_dtypes", -) - -# There are no system tests for this package. 
-old_sessions = """ - "unit", - "system", - "cover", - "lint", -""" - -new_sessions = """ - "lint", - "unit", - "unit_prerelease", - "compliance", - "compliance_prerelease", - "cover", -""" - -s.replace(["noxfile.py"], old_sessions, new_sessions) - -# Add compliance tests. -s.replace( - ["noxfile.py"], r"def default\(session\):", "def default(session, tests_path):" -) -s.replace(["noxfile.py"], r'os.path.join\("tests", "unit"\),', "tests_path,") -s.replace( - ["noxfile.py"], - r'f"--junitxml=unit_{session.python}_sponge_log.xml",', - r'f"--junitxml={os.path.split(tests_path)[-1]}_{session.python}_sponge_log.xml",', -) -s.replace( - ["noxfile.py"], - r''' -@nox.session\(python=UNIT_TEST_PYTHON_VERSIONS\) -def unit\(session\): - """Run the unit test suite.""" - default\(session\) -''', - r''' -def prerelease(session, tests_path): - constraints_path = str( - CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" - ) - - # PyArrow prerelease packages are published to an alternative PyPI host. - # https://arrow.apache.org/docs/python/install.html#installing-nightly-packages - session.install( - "--extra-index-url", - "https://pypi.fury.io/arrow-nightlies/", - "--prefer-binary", - "--pre", - "--upgrade", - "pyarrow", - ) - # Avoid pandas==2.2.0rc0 as this version causes PyArrow to fail. Once newer - # prerelease comes out, this constraint can be removed. See - # https://github.com/googleapis/python-db-dtypes-pandas/issues/234 - session.install( - "--extra-index-url", - "https://pypi.anaconda.org/scipy-wheels-nightly/simple", - "--prefer-binary", - "--pre", - "--upgrade", - "pandas!=2.2.0rc0", - ) - session.install( - "mock", - "asyncmock", - "pytest", - "pytest-cov", - "pytest-asyncio", - "-c", - constraints_path, - ) - - # Because we test minimum dependency versions on the minimum Python - # version, the first version we test with in the unit tests sessions has a - # constraints file containing all dependencies and extras. 
- with open( - CURRENT_DIRECTORY - / "testing" - / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. - deps = [ - match.group(1) - for match in re.finditer( - r"^\\s*(\\S+)(?===\\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - # We use --no-deps to ensure that pre-release versions aren't overwritten - # by the version ranges in setup.py. - session.install(*deps) - session.install("--no-deps", "-e", ".") - - # Print out prerelease package versions. - session.run("python", "-m", "pip", "freeze") - - # Run py.test against the unit tests. - session.run( - "py.test", - "--quiet", - f"--junitxml={os.path.split(tests_path)[-1]}_prerelease_{session.python}_sponge_log.xml", - "--cov=db_dtypes", - "--cov=tests/unit", - "--cov-append", - "--cov-config=.coveragerc", - "--cov-report=", - "--cov-fail-under=0", - tests_path, - *session.posargs, - ) - - -@nox.session(python=UNIT_TEST_PYTHON_VERSIONS[-1]) -def compliance(session): - """Run the compliance test suite.""" - default(session, os.path.join("tests", "compliance")) - - -@nox.session(python=UNIT_TEST_PYTHON_VERSIONS[-1]) -def compliance_prerelease(session): - """Run the compliance test suite with prerelease dependencies.""" - prerelease(session, os.path.join("tests", "compliance")) - - -@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) -def unit(session): - """Run the unit test suite.""" - default(session, os.path.join("tests", "unit")) - - -@nox.session(python=UNIT_TEST_PYTHON_VERSIONS[-1]) -def unit_prerelease(session): - """Run the unit test suite with prerelease dependencies.""" - prerelease(session, os.path.join("tests", "unit")) -''', -) - # ---------------------------------------------------------------------------- # Samples templates # ---------------------------------------------------------------------------- diff --git a/packages/db-dtypes/scripts/decrypt-secrets.sh 
b/packages/db-dtypes/scripts/decrypt-secrets.sh index 0018b421ddf8..120b0ddc4364 100755 --- a/packages/db-dtypes/scripts/decrypt-secrets.sh +++ b/packages/db-dtypes/scripts/decrypt-secrets.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Copyright 2023 Google LLC All rights reserved. +# Copyright 2024 Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/db-dtypes/scripts/readme-gen/readme_gen.py b/packages/db-dtypes/scripts/readme-gen/readme_gen.py index 1acc119835b5..8f5e248a0da1 100644 --- a/packages/db-dtypes/scripts/readme-gen/readme_gen.py +++ b/packages/db-dtypes/scripts/readme-gen/readme_gen.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. From 432a76d1a4515d571d9685633d198df39a58454d Mon Sep 17 00:00:00 2001 From: Chelsea Lin <124939984+chelsea-lin@users.noreply.github.com> Date: Mon, 8 Jul 2024 16:10:51 -0700 Subject: [PATCH 157/210] chore: format code files with nox (#281) --- packages/db-dtypes/db_dtypes/__init__.py | 4 +--- packages/db-dtypes/db_dtypes/core.py | 1 - packages/db-dtypes/db_dtypes/pandas_backports.py | 1 - packages/db-dtypes/noxfile.py | 1 - packages/db-dtypes/samples/snippets/pandas_date_and_time.py | 2 ++ packages/db-dtypes/setup.py | 1 + .../db-dtypes/tests/compliance/date/test_date_compliance.py | 3 +-- .../db-dtypes/tests/compliance/time/test_time_compliance.py | 3 +-- packages/db-dtypes/tests/unit/test_arrow.py | 1 - packages/db-dtypes/tests/unit/test_date.py | 1 - 10 files changed, 6 insertions(+), 12 deletions(-) diff --git a/packages/db-dtypes/db_dtypes/__init__.py b/packages/db-dtypes/db_dtypes/__init__.py index 54721a3e7bd1..ad4ea331b6c0 100644 --- a/packages/db-dtypes/db_dtypes/__init__.py +++ 
b/packages/db-dtypes/db_dtypes/__init__.py @@ -27,10 +27,8 @@ import pyarrow import pyarrow.compute - -from db_dtypes.version import __version__ from db_dtypes import core - +from db_dtypes.version import __version__ date_dtype_name = "dbdate" time_dtype_name = "dbtime" diff --git a/packages/db-dtypes/db_dtypes/core.py b/packages/db-dtypes/db_dtypes/core.py index 567b931658c4..7c9eb6b9a834 100644 --- a/packages/db-dtypes/db_dtypes/core.py +++ b/packages/db-dtypes/db_dtypes/core.py @@ -21,7 +21,6 @@ from db_dtypes import pandas_backports - pandas_release = pandas_backports.pandas_release diff --git a/packages/db-dtypes/db_dtypes/pandas_backports.py b/packages/db-dtypes/db_dtypes/pandas_backports.py index 0966e8319f07..e3aea1783fd6 100644 --- a/packages/db-dtypes/db_dtypes/pandas_backports.py +++ b/packages/db-dtypes/db_dtypes/pandas_backports.py @@ -29,7 +29,6 @@ import pandas.compat.numpy.function import pandas.core.nanops - pandas_release = packaging.version.parse(pandas.__version__).release # Create aliases for private methods in case they move in a future version. 
diff --git a/packages/db-dtypes/noxfile.py b/packages/db-dtypes/noxfile.py index 102670a8574b..d6735da7a82d 100644 --- a/packages/db-dtypes/noxfile.py +++ b/packages/db-dtypes/noxfile.py @@ -21,7 +21,6 @@ import os import pathlib import re -import re import shutil from typing import Dict, List import warnings diff --git a/packages/db-dtypes/samples/snippets/pandas_date_and_time.py b/packages/db-dtypes/samples/snippets/pandas_date_and_time.py index 3292e6cc82a5..b6e55813064c 100644 --- a/packages/db-dtypes/samples/snippets/pandas_date_and_time.py +++ b/packages/db-dtypes/samples/snippets/pandas_date_and_time.py @@ -17,7 +17,9 @@ def pandas_date_and_time(): # [START bigquery_pandas_date_create] import datetime + import pandas as pd + import db_dtypes # noqa import to register dtypes dates = pd.Series([datetime.date(2021, 9, 17), "2021-9-18"], dtype="dbdate") diff --git a/packages/db-dtypes/setup.py b/packages/db-dtypes/setup.py index e91b109130f5..276a9401081f 100644 --- a/packages/db-dtypes/setup.py +++ b/packages/db-dtypes/setup.py @@ -15,6 +15,7 @@ import io import os import re + from setuptools import setup # Package metadata. diff --git a/packages/db-dtypes/tests/compliance/date/test_date_compliance.py b/packages/db-dtypes/tests/compliance/date/test_date_compliance.py index 5c43287c8d9a..038005a5d9bb 100644 --- a/packages/db-dtypes/tests/compliance/date/test_date_compliance.py +++ b/packages/db-dtypes/tests/compliance/date/test_date_compliance.py @@ -21,13 +21,12 @@ """ import pandas -from pandas.tests.extension import base import pandas._testing as tm +from pandas.tests.extension import base import pytest import db_dtypes - # TODO(https://github.com/googleapis/python-db-dtypes-pandas/issues/87): Add # compliance tests for arithmetic operations. 
diff --git a/packages/db-dtypes/tests/compliance/time/test_time_compliance.py b/packages/db-dtypes/tests/compliance/time/test_time_compliance.py index 3b36d164e22a..f894ba5497c7 100644 --- a/packages/db-dtypes/tests/compliance/time/test_time_compliance.py +++ b/packages/db-dtypes/tests/compliance/time/test_time_compliance.py @@ -21,13 +21,12 @@ """ import pandas -from pandas.tests.extension import base import pandas._testing as tm +from pandas.tests.extension import base import pytest import db_dtypes - # TODO(https://github.com/googleapis/python-db-dtypes-pandas/issues/87): Add # compliance tests for arithmetic operations. diff --git a/packages/db-dtypes/tests/unit/test_arrow.py b/packages/db-dtypes/tests/unit/test_arrow.py index 56bbd01b7d25..a7a38a274133 100644 --- a/packages/db-dtypes/tests/unit/test_arrow.py +++ b/packages/db-dtypes/tests/unit/test_arrow.py @@ -23,7 +23,6 @@ import db_dtypes - SECOND_NANOS = 1_000_000_000 MINUTE_NANOS = 60 * SECOND_NANOS HOUR_NANOS = 60 * MINUTE_NANOS diff --git a/packages/db-dtypes/tests/unit/test_date.py b/packages/db-dtypes/tests/unit/test_date.py index fddf1a0a22a5..9dbc6d9ccddc 100644 --- a/packages/db-dtypes/tests/unit/test_date.py +++ b/packages/db-dtypes/tests/unit/test_date.py @@ -25,7 +25,6 @@ import db_dtypes from db_dtypes import pandas_backports - VALUE_PARSING_TEST_CASES = [ # Min/Max values for pandas.Timestamp. 
("1677-09-22", datetime.date(1677, 9, 22)), From f87902a41a4945d14dbe7511f51843ecfbf164e1 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 8 Jul 2024 23:22:34 +0000 Subject: [PATCH 158/210] chore(python): use python 3.10 for docs build (#282) Source-Link: https://github.com/googleapis/synthtool/commit/9ae07858520bf035a3d5be569b5a65d960ee4392 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:52210e0e0559f5ea8c52be148b33504022e1faef4e95fbe4b32d68022af2fa7e Co-authored-by: Owl Bot Co-authored-by: Lingqing Gan --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 4 +- .../db-dtypes/.kokoro/docker/docs/Dockerfile | 21 +++++---- .../.kokoro/docker/docs/requirements.txt | 40 ++++++++-------- packages/db-dtypes/.kokoro/requirements.txt | 46 +++++++++---------- 4 files changed, 59 insertions(+), 52 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 91d742b5b9fe..f30cb3775afc 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:d3de8a02819f65001effcbd3ea76ce97e9bcff035c7a89457f40f892c87c5b32 -# created: 2024-07-03T17:43:00.77142528Z + digest: sha256:52210e0e0559f5ea8c52be148b33504022e1faef4e95fbe4b32d68022af2fa7e +# created: 2024-07-08T19:25:35.862283192Z diff --git a/packages/db-dtypes/.kokoro/docker/docs/Dockerfile b/packages/db-dtypes/.kokoro/docker/docs/Dockerfile index a26ce61930f5..5205308b334d 100644 --- a/packages/db-dtypes/.kokoro/docker/docs/Dockerfile +++ b/packages/db-dtypes/.kokoro/docker/docs/Dockerfile @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from ubuntu:22.04 +from ubuntu:24.04 ENV DEBIAN_FRONTEND noninteractive @@ -40,7 +40,6 @@ RUN apt-get update \ libssl-dev \ libsqlite3-dev \ portaudio19-dev \ - python3-distutils \ redis-server \ software-properties-common \ ssh \ @@ -60,18 +59,22 @@ RUN apt-get update \ && rm -rf /var/lib/apt/lists/* \ && rm -f /var/cache/apt/archives/*.deb -###################### Install python 3.9.13 -# Download python 3.9.13 -RUN wget https://www.python.org/ftp/python/3.9.13/Python-3.9.13.tgz +###################### Install python 3.10.14 for docs/docfx session + +# Download python 3.10.14 +RUN wget https://www.python.org/ftp/python/3.10.14/Python-3.10.14.tgz # Extract files -RUN tar -xvf Python-3.9.13.tgz +RUN tar -xvf Python-3.10.14.tgz -# Install python 3.9.13 -RUN ./Python-3.9.13/configure --enable-optimizations +# Install python 3.10.14 +RUN ./Python-3.10.14/configure --enable-optimizations RUN make altinstall +RUN python3.10 -m venv /venv +ENV PATH /venv/bin:$PATH + ###################### Install pip RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ && python3 /tmp/get-pip.py \ @@ -84,4 +87,4 @@ RUN python3 -m pip COPY requirements.txt /requirements.txt RUN python3 -m pip install --require-hashes -r requirements.txt -CMD ["python3.8"] +CMD ["python3.10"] diff --git a/packages/db-dtypes/.kokoro/docker/docs/requirements.txt b/packages/db-dtypes/.kokoro/docker/docs/requirements.txt index 0e5d70f20f83..7129c7715594 100644 --- a/packages/db-dtypes/.kokoro/docker/docs/requirements.txt +++ b/packages/db-dtypes/.kokoro/docker/docs/requirements.txt @@ -4,9 +4,9 @@ # # pip-compile --allow-unsafe --generate-hashes requirements.in # -argcomplete==3.2.3 \ - --hash=sha256:bf7900329262e481be5a15f56f19736b376df6f82ed27576fa893652c5de6c23 \ - --hash=sha256:c12355e0494c76a2a7b73e3a59b09024ca0ba1e279fb9ed6c1b82d5b74b6a70c +argcomplete==3.4.0 \ + --hash=sha256:69a79e083a716173e5532e0fa3bef45f793f4e61096cf52b5a42c0211c8b8aa5 \ + 
--hash=sha256:c2abcdfe1be8ace47ba777d4fce319eb13bf8ad9dace8d085dcad6eded88057f # via nox colorlog==6.8.2 \ --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ @@ -16,23 +16,27 @@ distlib==0.3.8 \ --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 # via virtualenv -filelock==3.13.1 \ - --hash=sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e \ - --hash=sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c +filelock==3.15.4 \ + --hash=sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb \ + --hash=sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7 # via virtualenv -nox==2024.3.2 \ - --hash=sha256:e53514173ac0b98dd47585096a55572fe504fecede58ced708979184d05440be \ - --hash=sha256:f521ae08a15adbf5e11f16cb34e8d0e6ea521e0b92868f684e91677deb974553 +nox==2024.4.15 \ + --hash=sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565 \ + --hash=sha256:ecf6700199cdfa9e5ea0a41ff5e6ef4641d09508eda6edb89d9987864115817f # via -r requirements.in -packaging==24.0 \ - --hash=sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5 \ - --hash=sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9 +packaging==24.1 \ + --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ + --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 # via nox -platformdirs==4.2.0 \ - --hash=sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068 \ - --hash=sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768 +platformdirs==4.2.2 \ + --hash=sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee \ + --hash=sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3 # via virtualenv -virtualenv==20.25.1 \ - 
--hash=sha256:961c026ac520bac5f69acb8ea063e8a4f071bcc9457b9c1f28f6b085c511583a \ - --hash=sha256:e08e13ecdca7a0bd53798f356d5831434afa5b07b93f0abdf0797b7a06ffe197 +tomli==2.0.1 \ + --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ + --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f + # via nox +virtualenv==20.26.3 \ + --hash=sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a \ + --hash=sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589 # via nox diff --git a/packages/db-dtypes/.kokoro/requirements.txt b/packages/db-dtypes/.kokoro/requirements.txt index 35ece0e4d2e9..9622baf0ba38 100644 --- a/packages/db-dtypes/.kokoro/requirements.txt +++ b/packages/db-dtypes/.kokoro/requirements.txt @@ -20,9 +20,9 @@ cachetools==5.3.3 \ --hash=sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945 \ --hash=sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105 # via google-auth -certifi==2024.6.2 \ - --hash=sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516 \ - --hash=sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56 +certifi==2024.7.4 \ + --hash=sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b \ + --hash=sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90 # via requests cffi==1.16.0 \ --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \ @@ -371,23 +371,23 @@ more-itertools==10.3.0 \ # via # jaraco-classes # jaraco-functools -nh3==0.2.17 \ - --hash=sha256:0316c25b76289cf23be6b66c77d3608a4fdf537b35426280032f432f14291b9a \ - --hash=sha256:1a814dd7bba1cb0aba5bcb9bebcc88fd801b63e21e2450ae6c52d3b3336bc911 \ - --hash=sha256:1aa52a7def528297f256de0844e8dd680ee279e79583c76d6fa73a978186ddfb \ - --hash=sha256:22c26e20acbb253a5bdd33d432a326d18508a910e4dcf9a3316179860d53345a \ - 
--hash=sha256:40015514022af31975c0b3bca4014634fa13cb5dc4dbcbc00570acc781316dcc \ - --hash=sha256:40d0741a19c3d645e54efba71cb0d8c475b59135c1e3c580f879ad5514cbf028 \ - --hash=sha256:551672fd71d06cd828e282abdb810d1be24e1abb7ae2543a8fa36a71c1006fe9 \ - --hash=sha256:66f17d78826096291bd264f260213d2b3905e3c7fae6dfc5337d49429f1dc9f3 \ - --hash=sha256:85cdbcca8ef10733bd31f931956f7fbb85145a4d11ab9e6742bbf44d88b7e351 \ - --hash=sha256:a3f55fabe29164ba6026b5ad5c3151c314d136fd67415a17660b4aaddacf1b10 \ - --hash=sha256:b4427ef0d2dfdec10b641ed0bdaf17957eb625b2ec0ea9329b3d28806c153d71 \ - --hash=sha256:ba73a2f8d3a1b966e9cdba7b211779ad8a2561d2dba9674b8a19ed817923f65f \ - --hash=sha256:c21bac1a7245cbd88c0b0e4a420221b7bfa838a2814ee5bb924e9c2f10a1120b \ - --hash=sha256:c551eb2a3876e8ff2ac63dff1585236ed5dfec5ffd82216a7a174f7c5082a78a \ - --hash=sha256:c790769152308421283679a142dbdb3d1c46c79c823008ecea8e8141db1a2062 \ - --hash=sha256:d7a25fd8c86657f5d9d576268e3b3767c5cd4f42867c9383618be8517f0f022a +nh3==0.2.18 \ + --hash=sha256:0411beb0589eacb6734f28d5497ca2ed379eafab8ad8c84b31bb5c34072b7164 \ + --hash=sha256:14c5a72e9fe82aea5fe3072116ad4661af5cf8e8ff8fc5ad3450f123e4925e86 \ + --hash=sha256:19aaba96e0f795bd0a6c56291495ff59364f4300d4a39b29a0abc9cb3774a84b \ + --hash=sha256:34c03fa78e328c691f982b7c03d4423bdfd7da69cd707fe572f544cf74ac23ad \ + --hash=sha256:36c95d4b70530b320b365659bb5034341316e6a9b30f0b25fa9c9eff4c27a204 \ + --hash=sha256:3a157ab149e591bb638a55c8c6bcb8cdb559c8b12c13a8affaba6cedfe51713a \ + --hash=sha256:42c64511469005058cd17cc1537578eac40ae9f7200bedcfd1fc1a05f4f8c200 \ + --hash=sha256:5f36b271dae35c465ef5e9090e1fdaba4a60a56f0bb0ba03e0932a66f28b9189 \ + --hash=sha256:6955369e4d9f48f41e3f238a9e60f9410645db7e07435e62c6a9ea6135a4907f \ + --hash=sha256:7b7c2a3c9eb1a827d42539aa64091640bd275b81e097cd1d8d82ef91ffa2e811 \ + --hash=sha256:8ce0f819d2f1933953fca255db2471ad58184a60508f03e6285e5114b6254844 \ + 
--hash=sha256:94a166927e53972a9698af9542ace4e38b9de50c34352b962f4d9a7d4c927af4 \ + --hash=sha256:a7f1b5b2c15866f2db413a3649a8fe4fd7b428ae58be2c0f6bca5eefd53ca2be \ + --hash=sha256:c8b3a1cebcba9b3669ed1a84cc65bf005728d2f0bc1ed2a6594a992e817f3a50 \ + --hash=sha256:de3ceed6e661954871d6cd78b410213bdcb136f79aafe22aa7182e028b8c7307 \ + --hash=sha256:f0eca9ca8628dbb4e916ae2491d72957fdd35f7a5d326b7032a345f111ac07fe # via readme-renderer nox==2024.4.15 \ --hash=sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565 \ @@ -460,9 +460,9 @@ python-dateutil==2.9.0.post0 \ --hash=sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3 \ --hash=sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427 # via gcp-releasetool -readme-renderer==43.0 \ - --hash=sha256:1818dd28140813509eeed8d62687f7cd4f7bad90d4db586001c5dc09d4fde311 \ - --hash=sha256:19db308d86ecd60e5affa3b2a98f017af384678c63c88e5d4556a380e674f3f9 +readme-renderer==44.0 \ + --hash=sha256:2fbca89b81a08526aadf1357a8c2ae889ec05fb03f5da67f9769c9a592166151 \ + --hash=sha256:8712034eabbfa6805cacf1402b4eeb2a73028f72d1166d6f5cb7f9c047c5d1e1 # via twine requests==2.32.3 \ --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ From 2d88b72b1b06cd0d6ac882f651ea086ca33cee12 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tim=20Swe=C3=B1a=20=28Swast=29?= Date: Tue, 9 Jul 2024 15:12:38 -0500 Subject: [PATCH 159/210] chore: remove references to conda (#283) --- packages/db-dtypes/noxfile.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/packages/db-dtypes/noxfile.py b/packages/db-dtypes/noxfile.py index d6735da7a82d..0366bb0802f8 100644 --- a/packages/db-dtypes/noxfile.py +++ b/packages/db-dtypes/noxfile.py @@ -204,8 +204,6 @@ def prerelease(session, tests_path): # prerelease comes out, this constraint can be removed. 
See # https://github.com/googleapis/python-db-dtypes-pandas/issues/234 session.install( - "--extra-index-url", - "https://pypi.anaconda.org/scipy-wheels-nightly/simple", "--prefer-binary", "--pre", "--upgrade", From ddaa4222d6b1c1135889c7da0aba318131b5bcb2 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 31 Jul 2024 10:41:25 -0700 Subject: [PATCH 160/210] chore(python): fix docs build (#285) Source-Link: https://github.com/googleapis/synthtool/commit/bef813d194de29ddf3576eda60148b6b3dcc93d9 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:94bb690db96e6242b2567a4860a94d48fa48696d092e51b0884a1a2c0a79a407 Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 4 ++-- .../db-dtypes/.kokoro/docker/docs/Dockerfile | 9 ++++----- packages/db-dtypes/.kokoro/publish-docs.sh | 20 +++++++++---------- 3 files changed, 16 insertions(+), 17 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index f30cb3775afc..6d064ddb9b06 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:52210e0e0559f5ea8c52be148b33504022e1faef4e95fbe4b32d68022af2fa7e -# created: 2024-07-08T19:25:35.862283192Z + digest: sha256:94bb690db96e6242b2567a4860a94d48fa48696d092e51b0884a1a2c0a79a407 +# created: 2024-07-31T14:52:44.926548819Z diff --git a/packages/db-dtypes/.kokoro/docker/docs/Dockerfile b/packages/db-dtypes/.kokoro/docker/docs/Dockerfile index 5205308b334d..e5410e296bd8 100644 --- a/packages/db-dtypes/.kokoro/docker/docs/Dockerfile +++ b/packages/db-dtypes/.kokoro/docker/docs/Dockerfile @@ -72,19 +72,18 @@ RUN tar -xvf Python-3.10.14.tgz RUN ./Python-3.10.14/configure --enable-optimizations RUN make altinstall -RUN python3.10 -m venv /venv -ENV PATH /venv/bin:$PATH +ENV PATH /usr/local/bin/python3.10:$PATH ###################### Install pip RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ - && python3 /tmp/get-pip.py \ + && python3.10 /tmp/get-pip.py \ && rm /tmp/get-pip.py # Test pip -RUN python3 -m pip +RUN python3.10 -m pip # Install build requirements COPY requirements.txt /requirements.txt -RUN python3 -m pip install --require-hashes -r requirements.txt +RUN python3.10 -m pip install --require-hashes -r requirements.txt CMD ["python3.10"] diff --git a/packages/db-dtypes/.kokoro/publish-docs.sh b/packages/db-dtypes/.kokoro/publish-docs.sh index 38f083f05aa0..233205d580e9 100755 --- a/packages/db-dtypes/.kokoro/publish-docs.sh +++ b/packages/db-dtypes/.kokoro/publish-docs.sh @@ -21,18 +21,18 @@ export PYTHONUNBUFFERED=1 export PATH="${HOME}/.local/bin:${PATH}" # Install nox -python3 -m pip install --require-hashes -r .kokoro/requirements.txt -python3 -m nox --version +python3.10 -m pip install --require-hashes -r .kokoro/requirements.txt +python3.10 -m nox --version # build docs nox -s docs # create metadata -python3 -m docuploader create-metadata \ +python3.10 -m docuploader create-metadata \ --name=$(jq --raw-output '.name // empty' 
.repo-metadata.json) \ - --version=$(python3 setup.py --version) \ + --version=$(python3.10 setup.py --version) \ --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ - --distribution-name=$(python3 setup.py --name) \ + --distribution-name=$(python3.10 setup.py --name) \ --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) @@ -40,18 +40,18 @@ python3 -m docuploader create-metadata \ cat docs.metadata # upload docs -python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}" +python3.10 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}" # docfx yaml files nox -s docfx # create metadata. -python3 -m docuploader create-metadata \ +python3.10 -m docuploader create-metadata \ --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ - --version=$(python3 setup.py --version) \ + --version=$(python3.10 setup.py --version) \ --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ - --distribution-name=$(python3 setup.py --name) \ + --distribution-name=$(python3.10 setup.py --name) \ --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) @@ -59,4 +59,4 @@ python3 -m docuploader create-metadata \ cat docs.metadata # upload docs -python3 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}" +python3.10 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}" From 
d349771dd6542edad3b9c0500dfae185790f27da Mon Sep 17 00:00:00 2001 From: Chelsea Lin <124939984+chelsea-lin@users.noreply.github.com> Date: Thu, 1 Aug 2024 09:38:48 -0700 Subject: [PATCH 161/210] chore: add sync-repo-settings and exclude unit 3.7 (#287) --- .../db-dtypes/.github/sync-repo-settings.yaml | 34 +++++++++++++++++++ 1 file changed, 34 insertions(+) create mode 100644 packages/db-dtypes/.github/sync-repo-settings.yaml diff --git a/packages/db-dtypes/.github/sync-repo-settings.yaml b/packages/db-dtypes/.github/sync-repo-settings.yaml new file mode 100644 index 000000000000..92e3e7692b05 --- /dev/null +++ b/packages/db-dtypes/.github/sync-repo-settings.yaml @@ -0,0 +1,34 @@ +# https://github.com/googleapis/repo-automation-bots/tree/main/packages/sync-repo-settings +# Rules for main branch protection +branchProtectionRules: +# Identifies the protection rule pattern. Name of the branch to be protected. +# Defaults to `main` +- pattern: main + requiresCodeOwnerReviews: true + requiresStrictStatusChecks: false + requiredStatusCheckContexts: + - 'conventionalcommits.org' + - 'cla/google' + - 'docs' + - 'lint' + - 'unit (3.8)' + - 'unit (3.9)' + - 'unit (3.10)' + - 'unit (3.11)' + - 'unit (3.12)' + - 'cover' +permissionRules: + - team: actools-python + permission: admin + - team: actools + permission: admin + - team: api-bigquery + permission: push + - team: api-bigquery-dataframe + permission: push + - team: yoshi-python + permission: push + - team: python-samples-owners + permission: push + - team: python-samples-reviewers + permission: push \ No newline at end of file From dbbd20a4ef15c47f32a6cd33b95401dcf9890c8b Mon Sep 17 00:00:00 2001 From: Chelsea Lin <124939984+chelsea-lin@users.noreply.github.com> Date: Tue, 6 Aug 2024 07:37:02 -0700 Subject: [PATCH 162/210] chore: integrate compliance tests into code coverage reporting (#288) * chore: integrate compliance tests into code coverage reporting * fixing * fixing --- 
.../db-dtypes/.github/workflows/unittest.yml | 28 +++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/packages/db-dtypes/.github/workflows/unittest.yml b/packages/db-dtypes/.github/workflows/unittest.yml index 89f021e695e5..81ff447f30c1 100644 --- a/packages/db-dtypes/.github/workflows/unittest.yml +++ b/packages/db-dtypes/.github/workflows/unittest.yml @@ -58,11 +58,39 @@ jobs: name: coverage-artifact-prerelease-${{ matrix.python }} path: .coverage-prerelease-${{ matrix.python }} + compliance: + runs-on: ubuntu-latest + strategy: + matrix: + python: ['3.12'] + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python }} + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run compliance tests + env: + COVERAGE_FILE: .coverage-${{ matrix.python }} + run: | + nox -s compliance-${{ matrix.python }} + - name: Upload coverage results + uses: actions/upload-artifact@v4 + with: + name: coverage-artifact-compliance-${{ matrix.python }} + path: .coverage-compliance-${{ matrix.python }} + cover: runs-on: ubuntu-latest needs: - unit - unit-prerelease + - compliance steps: - name: Checkout uses: actions/checkout@v4 From 9ab2f2fc96b56c066fc5f14cf5bd1b04c8ce8f98 Mon Sep 17 00:00:00 2001 From: Chelsea Lin <124939984+chelsea-lin@users.noreply.github.com> Date: Thu, 8 Aug 2024 11:21:32 -0700 Subject: [PATCH 163/210] feat: create db_dtypes JSONDtype and JSONArray (#284) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Copy JSONDtype and JSONArray from tests/extension/json and their tests * formatting * converts to ArrowStringArray * box and unbox between string(storage) and dict(getitem) * minor * fix test_getitem_scalar test * add docstring and remove unused functions * 🦉 Updates from OwlBot post-processor See 
https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fix lint * address some comments * supports all types except Array * support array type * only import when pandas version is higher than 1.5.0 * exclude groupby and other tests * others * skip jsondtype and jsonarray * fixing * fix coverage file name * add a simple unit test * unit-test for some functionalities * address comments * fix test cover * fixing * Update db_dtypes/json.py * fixing * fixing * add pyarrow_dtypes * fixing --------- Co-authored-by: Owl Bot Co-authored-by: Tim Sweña (Swast) --- .../db-dtypes/.github/workflows/unittest.yml | 2 +- packages/db-dtypes/db_dtypes/__init__.py | 34 +- packages/db-dtypes/db_dtypes/json.py | 209 ++++++++++ .../tests/compliance/json/conftest.py | 181 +++++++++ .../compliance/json/test_json_compliance.py | 361 ++++++++++++++++++ packages/db-dtypes/tests/unit/test_json.py | 95 +++++ 6 files changed, 873 insertions(+), 9 deletions(-) create mode 100644 packages/db-dtypes/db_dtypes/json.py create mode 100644 packages/db-dtypes/tests/compliance/json/conftest.py create mode 100644 packages/db-dtypes/tests/compliance/json/test_json_compliance.py create mode 100644 packages/db-dtypes/tests/unit/test_json.py diff --git a/packages/db-dtypes/.github/workflows/unittest.yml b/packages/db-dtypes/.github/workflows/unittest.yml index 81ff447f30c1..0c2dca0fea8a 100644 --- a/packages/db-dtypes/.github/workflows/unittest.yml +++ b/packages/db-dtypes/.github/workflows/unittest.yml @@ -76,7 +76,7 @@ jobs: python -m pip install nox - name: Run compliance tests env: - COVERAGE_FILE: .coverage-${{ matrix.python }} + COVERAGE_FILE: .coverage-compliance-${{ matrix.python }} run: | nox -s compliance-${{ matrix.python }} - name: Upload coverage results diff --git a/packages/db-dtypes/db_dtypes/__init__.py b/packages/db-dtypes/db_dtypes/__init__.py index ad4ea331b6c0..d27e93e1222a 100644 --- a/packages/db-dtypes/db_dtypes/__init__.py +++ 
b/packages/db-dtypes/db_dtypes/__init__.py @@ -43,7 +43,14 @@ # nanosecond precision when boxing scalars. _NP_BOX_DTYPE = "datetime64[us]" -pandas_release = packaging.version.parse(pandas.__version__).release + +# To use JSONArray and JSONDtype, you'll need Pandas 1.5.0 or later. With the removal +# of Python 3.7 compatibility, the minimum Pandas version will be updated to 1.5.0. +if packaging.version.Version(pandas.__version__) >= packaging.version.Version("1.5.0"): + from db_dtypes.json import JSONArray, JSONDtype +else: + JSONArray = None + JSONDtype = None @pandas.api.extensions.register_extension_dtype @@ -337,10 +344,21 @@ def __sub__(self, other): return super().__sub__(other) -__all__ = [ - "__version__", - "DateArray", - "DateDtype", - "TimeArray", - "TimeDtype", -] +if not JSONArray or not JSONDtype: + __all__ = [ + "__version__", + "DateArray", + "DateDtype", + "TimeArray", + "TimeDtype", + ] +else: + __all__ = [ + "__version__", + "DateArray", + "DateDtype", + "JSONDtype", + "JSONArray", + "TimeArray", + "TimeDtype", + ] diff --git a/packages/db-dtypes/db_dtypes/json.py b/packages/db-dtypes/db_dtypes/json.py new file mode 100644 index 000000000000..ed04b720919b --- /dev/null +++ b/packages/db-dtypes/db_dtypes/json.py @@ -0,0 +1,209 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import annotations + +import json + +import numpy as np +import pandas as pd +import pandas.arrays as arrays +import pandas.core.dtypes.common as common +import pandas.core.indexers as indexers +import pyarrow as pa +import pyarrow.compute + + +@pd.api.extensions.register_extension_dtype +class JSONDtype(pd.api.extensions.ExtensionDtype): + """Extension dtype for BigQuery JSON data.""" + + name = "dbjson" + + @property + def na_value(self) -> pd.NA: + """Default NA value to use for this type.""" + return pd.NA + + @property + def type(self) -> type[str]: + """ + Return the scalar type for the array elements. + The standard JSON data types can be one of `dict`, `list`, `str`, `int`, `float`, + `bool` and `None`. However, this method returns a `str` type to indicate its + storage type, because the union of multiple types are not supported well in pandas. + """ + return str + + @property + def pyarrow_dtype(self): + """Return the pyarrow data type used for storing data in the pyarrow array.""" + return pa.string() + + @property + def _is_numeric(self) -> bool: + return False + + @property + def _is_boolean(self) -> bool: + return False + + @classmethod + def construct_array_type(cls): + """Return the array type associated with this dtype.""" + return JSONArray + + +class JSONArray(arrays.ArrowExtensionArray): + """Extension array that handles BigQuery JSON data, leveraging a string-based + pyarrow array for storage. 
It enables seamless conversion to JSON objects when + accessing individual elements.""" + + _dtype = JSONDtype() + + def __init__(self, values, dtype=None, copy=False) -> None: + self._dtype = JSONDtype() + if isinstance(values, pa.Array): + self._pa_array = pa.chunked_array([values]) + elif isinstance(values, pa.ChunkedArray): + self._pa_array = values + else: + raise ValueError(f"Unsupported type '{type(values)}' for JSONArray") + + @classmethod + def _box_pa( + cls, value, pa_type: pa.DataType | None = None + ) -> pa.Array | pa.ChunkedArray | pa.Scalar: + """Box value into a pyarrow Array, ChunkedArray or Scalar.""" + assert pa_type is None or pa_type == cls._dtype.pyarrow_dtype + + if isinstance(value, pa.Scalar) or not ( + common.is_list_like(value) and not common.is_dict_like(value) + ): + return cls._box_pa_scalar(value) + return cls._box_pa_array(value) + + @classmethod + def _box_pa_scalar(cls, value) -> pa.Scalar: + """Box value into a pyarrow Scalar.""" + if pd.isna(value): + pa_scalar = pa.scalar(None, type=cls._dtype.pyarrow_dtype) + else: + value = JSONArray._serialize_json(value) + pa_scalar = pa.scalar( + value, type=cls._dtype.pyarrow_dtype, from_pandas=True + ) + + return pa_scalar + + @classmethod + def _box_pa_array(cls, value, copy: bool = False) -> pa.Array | pa.ChunkedArray: + """Box value into a pyarrow Array or ChunkedArray.""" + if isinstance(value, cls): + pa_array = value._pa_array + else: + value = [JSONArray._serialize_json(x) for x in value] + pa_array = pa.array(value, type=cls._dtype.pyarrow_dtype, from_pandas=True) + return pa_array + + @classmethod + def _from_sequence(cls, scalars, *, dtype=None, copy=False): + """Construct a new ExtensionArray from a sequence of scalars.""" + pa_array = cls._box_pa(scalars) + arr = cls(pa_array) + return arr + + @staticmethod + def _serialize_json(value): + """A static method that converts a JSON value into a string representation.""" + if not common.is_list_like(value) and pd.isna(value): + 
return value + else: + # `sort_keys=True` sorts dictionary keys before serialization, making + # JSON comparisons deterministic. + return json.dumps(value, sort_keys=True) + + @staticmethod + def _deserialize_json(value): + """A static method that converts a JSON string back into its original value.""" + if not pd.isna(value): + return json.loads(value) + else: + return value + + @property + def dtype(self) -> JSONDtype: + """An instance of JSONDtype""" + return self._dtype + + def _cmp_method(self, other, op): + if op.__name__ == "eq": + result = pyarrow.compute.equal(self._pa_array, self._box_pa(other)) + elif op.__name__ == "ne": + result = pyarrow.compute.not_equal(self._pa_array, self._box_pa(other)) + else: + # Comparison is not a meaningful one. We don't want to support sorting by JSON columns. + raise TypeError(f"{op.__name__} not supported for JSONArray") + return arrays.ArrowExtensionArray(result) + + def __getitem__(self, item): + """Select a subset of self.""" + item = indexers.check_array_indexer(self, item) + + if isinstance(item, np.ndarray): + if not len(item): + return type(self)(pa.chunked_array([], type=self.dtype.pyarrow_dtype)) + elif item.dtype.kind in "iu": + return self.take(item) + else: + # `check_array_indexer` should verify that the assertion hold true. + assert item.dtype.kind == "b" + return type(self)(self._pa_array.filter(item)) + elif isinstance(item, tuple): + item = indexers.unpack_tuple_and_ellipses(item) + + if common.is_scalar(item) and not common.is_integer(item): + # e.g. 
"foo" or 2.5 + # exception message copied from numpy + raise IndexError( + r"only integers, slices (`:`), ellipsis (`...`), numpy.newaxis " + r"(`None`) and integer or boolean arrays are valid indices" + ) + + value = self._pa_array[item] + if isinstance(value, pa.ChunkedArray): + return type(self)(value) + else: + scalar = JSONArray._deserialize_json(value.as_py()) + if scalar is None: + return self._dtype.na_value + else: + return scalar + + def __iter__(self): + """Iterate over elements of the array.""" + for value in self._pa_array: + val = JSONArray._deserialize_json(value.as_py()) + if val is None: + yield self._dtype.na_value + else: + yield val + + def _reduce( + self, name: str, *, skipna: bool = True, keepdims: bool = False, **kwargs + ): + """Return a scalar result of performing the reduction operation.""" + if name in ["min", "max"]: + raise TypeError("JSONArray does not support min/max reducntion.") + super()._reduce(name, skipna=skipna, keepdims=keepdims, **kwargs) diff --git a/packages/db-dtypes/tests/compliance/json/conftest.py b/packages/db-dtypes/tests/compliance/json/conftest.py new file mode 100644 index 000000000000..74870c4867d3 --- /dev/null +++ b/packages/db-dtypes/tests/compliance/json/conftest.py @@ -0,0 +1,181 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +import json +import random + +import numpy as np +import pandas as pd +import pytest + +from db_dtypes import JSONArray, JSONDtype + + +def make_data(): + # Since the `np.array` constructor needs a consistent shape after the first + # dimension, the samples data in this instance doesn't include the array type. + samples = [ + True, # Boolean + 100, # Int + 0.98, # Float + "str", # String + {"bool_value": True}, # Dict with a boolean + {"float_num": 3.14159}, # Dict with a float + {"date": "2024-07-16"}, # Dict with a date (as strings) + {"null_field": None}, # Dict with a null + {"list_data": [10, 20, 30]}, # Dict with a list + {"person": {"name": "Alice", "age": 35}}, # Dict with nested objects + {"address": {"street": "123 Main St", "city": "Anytown"}}, + {"order": {"items": ["book", "pen"], "total": 15.99}}, + ] + data = np.random.default_rng(2).choice(samples, size=100) + # This replaces a single data item with an array. We are skipping the first two + # items to avoid some `setitem` tests failed, because setting with a list is + # ambiguity in this context. + id = random.randint(3, 99) + data[id] = [0.1, 0.2] # Array + return data + + +@pytest.fixture +def dtype(): + return JSONDtype() + + +@pytest.fixture +def data(): + """Length-100 PeriodArray for semantics test.""" + data = make_data() + + return JSONArray._from_sequence(data) + + +@pytest.fixture +def data_for_twos(dtype): + """ + Length-100 array in which all the elements are two. + + Call pytest.skip in your fixture if the dtype does not support divmod. + """ + pytest.skip(f"{dtype} is not a numeric dtype") + + +@pytest.fixture +def data_missing(): + """Length 2 array with [NA, Valid]""" + return JSONArray._from_sequence([None, {"a": 10}]) + + +@pytest.fixture +def data_missing_for_sorting(): + return JSONArray._from_sequence([json.dumps({"b": 1}), None, json.dumps({"a": 4})]) + + +@pytest.fixture +def na_cmp(): + """ + Binary operator for comparing NA values. 
+ + Should return a function of two arguments that returns + True if both arguments are (scalar) NA for your type. + + By default, uses ``operator.is_`` + """ + + def cmp(a, b): + return lambda left, right: pd.isna(left) and pd.isna(right) + + return cmp + + +@pytest.fixture +def data_repeated(data): + """ + Generate many datasets. + + Parameters + ---------- + data : fixture implementing `data` + + Returns + ------- + Callable[[int], Generator]: + A callable that takes a `count` argument and + returns a generator yielding `count` datasets. + """ + + def gen(count): + for _ in range(count): + yield data + + return gen + + +_all_numeric_accumulations = ["cumsum", "cumprod", "cummin", "cummax"] + + +@pytest.fixture(params=_all_numeric_accumulations) +def all_numeric_accumulations(request): + """ + Fixture for numeric accumulation names + """ + return request.param + + +_all_boolean_reductions = ["all", "any"] + + +@pytest.fixture(params=_all_boolean_reductions) +def all_boolean_reductions(request): + """ + Fixture for boolean reduction names. + """ + return request.param + + +_all_numeric_reductions = [ + "count", + "sum", + "max", + "min", + "mean", + "prod", + "std", + "var", + "median", + "kurt", + "skew", + "sem", +] + + +@pytest.fixture(params=_all_numeric_reductions) +def all_numeric_reductions(request): + """ + Fixture for numeric reduction names. + """ + return request.param + + +@pytest.fixture(params=["data", "data_missing"]) +def all_data(request, data, data_missing): + """Parametrized fixture returning 'data' or 'data_missing' integer arrays. + + Used to test dtype conversion with and without missing values. 
+ """ + if request.param == "data": + return data + elif request.param == "data_missing": + return data_missing diff --git a/packages/db-dtypes/tests/compliance/json/test_json_compliance.py b/packages/db-dtypes/tests/compliance/json/test_json_compliance.py new file mode 100644 index 000000000000..18610a0dd660 --- /dev/null +++ b/packages/db-dtypes/tests/compliance/json/test_json_compliance.py @@ -0,0 +1,361 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import json +import typing + +import numpy as np +import pandas as pd +import pandas._testing as tm +import pandas.tests.extension.base as base +import pytest + + +class TestJSONArrayAccumulate(base.BaseAccumulateTests): + pass + + +class TestJSONArrayCasting(base.BaseCastingTests): + def test_astype_str(self, data): + # Use `json.dumps(str)` instead of passing `str(obj)` directly to the super method. + result = pd.Series(data[:5]).astype(str) + expected = pd.Series( + [json.dumps(x, sort_keys=True) for x in data[:5]], dtype=str + ) + tm.assert_series_equal(result, expected) + + @pytest.mark.parametrize( + "nullable_string_dtype", + [ + "string[python]", + "string[pyarrow]", + ], + ) + def test_astype_string(self, data, nullable_string_dtype): + # Use `json.dumps(str)` instead of passing `str(obj)` directly to the super method. 
+ result = pd.Series(data[:5]).astype(nullable_string_dtype) + expected = pd.Series( + [json.dumps(x, sort_keys=True) for x in data[:5]], + dtype=nullable_string_dtype, + ) + tm.assert_series_equal(result, expected) + + +class TestJSONArrayConstructors(base.BaseConstructorsTests): + def test_from_dtype(self, data): + # construct from our dtype & string dtype + dtype = data.dtype + + expected = pd.Series(data) + result = pd.Series(list(data), dtype=dtype) + tm.assert_series_equal(result, expected) + + result = pd.Series(list(data), dtype=str(dtype)) + tm.assert_series_equal(result, expected) + + # Use `{"col1": data}` instead of passing `data` directly to the super method. + # This prevents the DataFrame constructor from attempting to interpret the + # dictionary as column headers. + + # gh-30280 + expected = pd.DataFrame({"col1": data}).astype(dtype) + result = pd.DataFrame({"col1": list(data)}, dtype=dtype) + tm.assert_frame_equal(result, expected) + + result = pd.DataFrame({"col1": list(data)}, dtype=str(dtype)) + tm.assert_frame_equal(result, expected) + + def test_series_constructor_scalar_with_index(self, data, dtype): + # Use json.dumps(data[0]) instead of passing data[0] directly to the super method. + # This prevents the Series constructor from attempting to interpret the dictionary + # as column headers. 
+ scalar = json.dumps(data[0]) + result = pd.Series(scalar, index=[1, 2, 3], dtype=dtype) + expected = pd.Series([scalar] * 3, index=[1, 2, 3], dtype=dtype) + tm.assert_series_equal(result, expected) + + result = pd.Series(scalar, index=["foo"], dtype=dtype) + expected = pd.Series([scalar], index=["foo"], dtype=dtype) + tm.assert_series_equal(result, expected) + + +@pytest.mark.skip(reason="BigQuery does not allow group by a JSON-type column.") +class TestJSONArrayGroupby(base.BaseGroupbyTests): + pass + + +class TestJSONArrayDtype(base.BaseDtypeTests): + pass + + +class TestJSONArrayGetitem(base.BaseGetitemTests): + @pytest.mark.xfail(reason="JSONDtype's type returns its storage type.") + def test_getitem_scalar(self, data): + """ + `_getitem_` can return any JSON-types objects while `data.dtype.type` returns + a string to indicate its storage type. + > assert isinstance(result, data.dtype.type) + E AssertionError + """ + super().test_getitem_scalar(data) + + +class TestJSONArrayIndex(base.BaseIndexTests): + pass + + +class TestJSONArrayInterface(base.BaseInterfaceTests): + def test_array_interface(self, data): + result = np.array(data) + # Use `json.dumps(data[0])` instead of passing `data[0]` directly to the super method. + assert result[0] == json.dumps(data[0]) + + result = np.array(data, dtype=object) + # Use `json.dumps(x)` instead of passing `x` directly to the super method. 
+ expected = np.array([json.dumps(x) for x in data], dtype=object) + # if expected.ndim > 1: + # # nested data, explicitly construct as 1D + # expected = construct_1d_object_array_from_listlike(list(data)) + tm.assert_numpy_array_equal(result, expected) + + @pytest.mark.skip(reason="2D support not implemented for JSONArray") + def test_view(self, data): + super().test_view(data) + + +class TestJSONArrayParsing(base.BaseParsingTests): + @pytest.mark.xfail(reason="data type 'json' not understood") + @pytest.mark.parametrize("engine", ["c", "python"]) + def test_EA_types(self, engine, data, request): + super().test_EA_types(engine, data, request) + + +class TestJSONArrayMethods(base.BaseMethodsTests): + @pytest.mark.xfail(reason="Unhashable") + def test_value_counts_with_normalize(self, data): + super().test_value_counts_with_normalize(data) + + @pytest.mark.skip("fill-value is interpreted as a dict of values") + def test_fillna_copy_frame(self, data_missing): + super().test_fillna_copy_frame(data_missing) + + @pytest.mark.xfail(reason="combine for JSONArray not supported") + def test_combine_le(self, data_repeated): + super().test_combine_le(data_repeated) + + @pytest.mark.skip(reason="'<' not supported between instances of 'dict' and 'dict'") + def test_searchsorted(self, data_for_sorting, as_series): + super().test_searchsorted(self, data_for_sorting, as_series) + + @pytest.mark.xfail( + reason="`to_numpy` returns serialized JSON, " + + "while `__getitem__` returns JSON objects." + ) + def test_where_series(self, data, na_value, as_frame): + # `Series.where` calls `to_numpy` to get results. 
+ super().test_where_series(data, na_value, as_frame) + + @pytest.mark.skip(reason="BigQuery does not allow group by a JSON-type column.") + def test_factorize(self, data_for_grouping): + super().test_factorize(data_for_grouping) + + @pytest.mark.skip(reason="BigQuery does not allow group by a JSON-type column.") + def test_factorize_equivalence(self, data_for_grouping): + super().test_factorize_equivalence(data_for_grouping) + + @pytest.mark.skip(reason="BigQuery does not allow sort by a JSON-type column.") + def test_argsort(self, data_for_sorting): + super().test_argsort(data_for_sorting) + + @pytest.mark.skip(reason="BigQuery does not allow sort by a JSON-type column.") + def test_argmin_argmax(self, data_for_sorting): + super().test_argmin_argmax(data_for_sorting) + + @pytest.mark.skip(reason="BigQuery does not allow sort by a JSON-type column.") + def test_sort_values(self, data_for_sorting): + super().test_sort_values(data_for_sorting) + + @pytest.mark.skip(reason="BigQuery does not allow sort by a JSON-type column.") + def test_sort_values_frame(self, data_for_sorting): + super().test_sort_values_frame(data_for_sorting) + + +class TestJSONArrayMissing(base.BaseMissingTests): + @pytest.mark.xfail(reason="Setting a dict as a scalar") + def test_fillna_series(self): + """We treat dictionaries as a mapping in fillna, not a scalar.""" + super().test_fillna_series() + + @pytest.mark.xfail(reason="Setting a dict as a scalar") + def test_fillna_frame(self): + """We treat dictionaries as a mapping in fillna, not a scalar.""" + super().test_fillna_frame() + + +@pytest.mark.skip(reason="BigQuery JSON does not allow Arithmetic Ops.") +class TestJSONArrayArithmeticOps(base.BaseArithmeticOpsTests): + pass + + +class TestJSONArrayComparisonOps(base.BaseComparisonOpsTests): + def test_compare_array(self, data, comparison_op, request): + if comparison_op.__name__ not in ["eq", "ne"]: + mark = pytest.mark.xfail(reason="Comparison methods not implemented") + 
request.applymarker(mark) + super().test_compare_array(data, comparison_op) + + def test_compare_scalar(self, data, comparison_op, request): + if comparison_op.__name__ not in ["eq", "ne"]: + mark = pytest.mark.xfail(reason="Comparison methods not implemented") + request.applymarker(mark) + super().test_compare_scalar(data, comparison_op) + + def _cast_pointwise_result(self, op_name: str, obj, other, pointwise_result): + dtype = typing.cast(pd.StringDtype, tm.get_dtype(obj)) + if op_name in ["__add__", "__radd__"]: + cast_to = dtype + else: + cast_to = "boolean[pyarrow]" # type: ignore[assignment] + return pointwise_result.astype(cast_to) + + +class TestJSONArrayUnaryOps(base.BaseUnaryOpsTests): + pass + + +class TestJSONArrayPrinting(base.BasePrintingTests): + pass + + +class TestJSONArrayReduce(base.BaseReduceTests): + pass + + +class TestJSONArrayReshaping(base.BaseReshapingTests): + @pytest.mark.skip(reason="2D support not implemented for JSONArray") + def test_transpose(self, data): + super().test_transpose(data) + + @pytest.mark.xfail( + reason="`to_numpy` returns serialized JSON, " + + "while `__getitem__` returns JSON objects." + ) + def test_transpose_frame(self, data): + # `DataFrame.T` calls `to_numpy` to get results. + super().test_transpose_frame(data) + + +class TestJSONArraySetitem(base.BaseSetitemTests): + # Patching `[....] * len()` to base.BaseSetitemTests because pandas' internals + # has trouble setting sequences of values into scalar positions. + + @pytest.mark.parametrize( + "idx", + [[0, 1, 2], pd.array([0, 1, 2], dtype="Int64"), np.array([0, 1, 2])], + ids=["list", "integer-array", "numpy-array"], + ) + def test_setitem_integer_array(self, data, idx, box_in_series): + arr = data[:5].copy() + expected = data.take([0, 0, 0, 3, 4]) + + if box_in_series: + arr = pd.Series(arr) + expected = pd.Series(expected) + + # Use `[arr[0]] * len()` instead of passing `arr[0]` directly to the super method. 
+ arr[idx] = [arr[0]] * len(arr[idx]) + tm.assert_equal(arr, expected) + + @pytest.mark.parametrize( + "mask", + [ + np.array([True, True, True, False, False]), + pd.array([True, True, True, False, False], dtype="boolean"), + pd.array([True, True, True, pd.NA, pd.NA], dtype="boolean"), + ], + ids=["numpy-array", "boolean-array", "boolean-array-na"], + ) + def test_setitem_mask(self, data, mask, box_in_series): + arr = data[:5].copy() + expected = arr.take([0, 0, 0, 3, 4]) + if box_in_series: + arr = pd.Series(arr) + expected = pd.Series(expected) + # Use `[data[0]] * len()` instead of passing `data[0]` directly to the super method. + arr[mask] = [data[0]] * len(arr[mask]) + tm.assert_equal(expected, arr) + + def test_setitem_loc_iloc_slice(self, data): + arr = data[:5].copy() + s = pd.Series(arr, index=["a", "b", "c", "d", "e"]) + expected = pd.Series(data.take([0, 0, 0, 3, 4]), index=s.index) + + result = s.copy() + # Use `[data[0]] * len()` instead of passing `data[0]` directly to the super method. + result.iloc[:3] = [data[0]] * len(result.iloc[:3]) + tm.assert_equal(result, expected) + + result = s.copy() + result.loc[:"c"] = [data[0]] * len(result.loc[:"c"]) + tm.assert_equal(result, expected) + + def test_setitem_slice(self, data, box_in_series): + arr = data[:5].copy() + expected = data.take([0, 0, 0, 3, 4]) + if box_in_series: + arr = pd.Series(arr) + expected = pd.Series(expected) + + # Use `[data[0]] * 3` instead of passing `data[0]` directly to the super method. + arr[:3] = [data[0]] * 3 + tm.assert_equal(arr, expected) + + @pytest.mark.xfail(reason="only integer scalar arrays can be converted") + def test_setitem_2d_values(self, data): + super().test_setitem_2d_values(data) + + @pytest.mark.xfail( + reason="`to_numpy` returns serialized JSON, " + + "while `__getitem__` returns JSON objects." 
+ ) + def test_setitem_frame_2d_values(self, data): + super().test_setitem_frame_2d_values(data) + + @pytest.mark.parametrize("setter", ["loc", None]) + def test_setitem_mask_broadcast(self, data, setter): + ser = pd.Series(data) + mask = np.zeros(len(data), dtype=bool) + mask[:2] = True + + if setter: # loc + target = getattr(ser, setter) + else: # __setitem__ + target = ser + + # Use `[data[10]] * len()` instead of passing `data[10]` directly to the super method. + target[mask] = [data[10]] * len(target[mask]) + assert ser[0] == data[10] + assert ser[1] == data[10] + + @pytest.mark.xfail(reason="eq not implemented for ") + def test_setitem_mask_boolean_array_with_na(self, data, box_in_series): + super().test_setitem_mask_boolean_array_with_na(data, box_in_series) + + @pytest.mark.skip(reason="2D support not implemented for JSONArray") + def test_setitem_preserves_views(self, data): + super().test_setitem_preserves_views(data) + + +class TestJSONArrayDim2Compat(base.Dim2CompatTests): + pass diff --git a/packages/db-dtypes/tests/unit/test_json.py b/packages/db-dtypes/tests/unit/test_json.py new file mode 100644 index 000000000000..c48635d5a1bf --- /dev/null +++ b/packages/db-dtypes/tests/unit/test_json.py @@ -0,0 +1,95 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import json + +import pandas as pd +import pytest + +import db_dtypes + +# Check for minimum Pandas version. 
+pytest.importorskip("pandas", minversion="1.5.0") + + +# Python data types mirroring all standard JSON types: +# https://json-schema.org/understanding-json-schema/reference/type +JSON_DATA = { + "boolean": True, + "int": 100, + "float": 0.98, + "string": "hello world", + "array": [0.1, 0.2], + "dict": { + "null_field": None, + "order": { + "items": ["book", "pen", "computer"], + "total": 15.99, + "address": {"street": "123 Main St", "city": "Anytown"}, + }, + }, + "null": None, +} + + +def test_construct_w_unspported_types(): + with pytest.raises(ValueError): + db_dtypes.JSONArray(100) + + +def test_getitems_return_json_objects(): + data = db_dtypes.JSONArray._from_sequence(JSON_DATA.values()) + for id, key in enumerate(JSON_DATA.keys()): + if key == "null": + assert pd.isna(data[id]) + else: + assert data[id] == JSON_DATA[key] + + +def test_getitems_w_unboxed_dict(): + data = db_dtypes.JSONArray._from_sequence([JSON_DATA["dict"]]) + assert len(data[0]) == 2 + + assert data[0]["null_field"] is None + assert data[0]["order"]["address"]["city"] == "Anytown" + assert len(data[0]["order"]["items"]) == 3 + assert data[0]["order"]["items"][0] == "book" + + with pytest.raises(KeyError): + data[0]["unknown"] + + +def test_getitems_when_iter_with_null(): + data = db_dtypes.JSONArray._from_sequence([JSON_DATA["null"]]) + s = pd.Series(data) + result = s[:1].item() + assert pd.isna(result) + + +def test_to_numpy(): + s = pd.Series(db_dtypes.JSONArray._from_sequence(JSON_DATA.values())) + data = s.to_numpy() + for id, key in enumerate(JSON_DATA.keys()): + if key == "null": + assert pd.isna(data[id]) + else: + assert data[id] == json.dumps(JSON_DATA[key], sort_keys=True) + + +def test_deterministic_json_serialization(): + x = {"a": 0, "b": 1} + y = {"b": 1, "a": 0} + data = db_dtypes.JSONArray._from_sequence([x]) + assert y in data From 49285f3557eb2419c3ec3503131e19f7312df9db Mon Sep 17 00:00:00 2001 From: "release-please[bot]" 
<55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 12 Aug 2024 15:01:36 -0500 Subject: [PATCH 164/210] chore(main): release 1.3.0 (#265) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/db-dtypes/CHANGELOG.md | 12 ++++++++++++ packages/db-dtypes/db_dtypes/version.py | 2 +- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/packages/db-dtypes/CHANGELOG.md b/packages/db-dtypes/CHANGELOG.md index 3c48e7b334ee..0d26b4aa9836 100644 --- a/packages/db-dtypes/CHANGELOG.md +++ b/packages/db-dtypes/CHANGELOG.md @@ -1,5 +1,17 @@ # Changelog +## [1.3.0](https://github.com/googleapis/python-db-dtypes-pandas/compare/v1.2.0...v1.3.0) (2024-08-08) + + +### Features + +* Create db_dtypes JSONDtype and JSONArray ([#284](https://github.com/googleapis/python-db-dtypes-pandas/issues/284)) ([76790a8](https://github.com/googleapis/python-db-dtypes-pandas/commit/76790a8c67ae8fa9687a4e6a6f950b15e6f34c6f)) + + +### Documentation + +* Add summary_overview template ([#264](https://github.com/googleapis/python-db-dtypes-pandas/issues/264)) ([a97c341](https://github.com/googleapis/python-db-dtypes-pandas/commit/a97c34198cbed37c8ff8ea683d485ebe36b804d7)) + ## [1.2.0](https://github.com/googleapis/python-db-dtypes-pandas/compare/v1.1.1...v1.2.0) (2023-12-10) diff --git a/packages/db-dtypes/db_dtypes/version.py b/packages/db-dtypes/db_dtypes/version.py index 82681053c986..8b2c0bc20730 100644 --- a/packages/db-dtypes/db_dtypes/version.py +++ b/packages/db-dtypes/db_dtypes/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "1.2.0" +__version__ = "1.3.0" From 2f2319e0ee4cfd9681cdc4b1332ac91405b47120 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 17 Sep 2024 11:56:51 -0400 Subject: [PATCH 165/210] build(python): release script update (#289) * build(python): release script update Source-Link: https://github.com/googleapis/synthtool/commit/71a72973dddbc66ea64073b53eda49f0d22e0942 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:e8dcfd7cbfd8beac3a3ff8d3f3185287ea0625d859168cc80faccfc9a7a00455 * See https://github.com/googleapis/synthtool/pull/2008 * See https://github.com/googleapis/synthtool/pull/2008 --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 4 ++-- packages/db-dtypes/.github/workflows/unittest.yml | 3 +++ packages/db-dtypes/.kokoro/release.sh | 2 +- packages/db-dtypes/.kokoro/release/common.cfg | 2 +- 4 files changed, 7 insertions(+), 4 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 6d064ddb9b06..597e0c3261ca 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:94bb690db96e6242b2567a4860a94d48fa48696d092e51b0884a1a2c0a79a407 -# created: 2024-07-31T14:52:44.926548819Z + digest: sha256:e8dcfd7cbfd8beac3a3ff8d3f3185287ea0625d859168cc80faccfc9a7a00455 +# created: 2024-09-16T21:04:09.091105552Z diff --git a/packages/db-dtypes/.github/workflows/unittest.yml b/packages/db-dtypes/.github/workflows/unittest.yml index 0c2dca0fea8a..000e945eb5ed 100644 --- a/packages/db-dtypes/.github/workflows/unittest.yml +++ b/packages/db-dtypes/.github/workflows/unittest.yml @@ -30,6 +30,7 @@ jobs: with: name: coverage-artifact-${{ matrix.python }} path: .coverage-${{ matrix.python }} + include-hidden-files: true unit-prerelease: runs-on: ubuntu-latest @@ -57,6 +58,7 @@ jobs: with: name: coverage-artifact-prerelease-${{ matrix.python }} path: .coverage-prerelease-${{ matrix.python }} + include-hidden-files: true compliance: runs-on: ubuntu-latest @@ -84,6 +86,7 @@ jobs: with: name: coverage-artifact-compliance-${{ matrix.python }} path: .coverage-compliance-${{ matrix.python }} + include-hidden-files: true cover: runs-on: ubuntu-latest diff --git a/packages/db-dtypes/.kokoro/release.sh b/packages/db-dtypes/.kokoro/release.sh index 7846e14cf211..ca599fb84db2 100755 --- a/packages/db-dtypes/.kokoro/release.sh +++ b/packages/db-dtypes/.kokoro/release.sh @@ -23,7 +23,7 @@ python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source / export PYTHONUNBUFFERED=1 # Move into the package, build the distribution and upload. 
-TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-1") +TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-2") cd github/python-db-dtypes-pandas python3 setup.py sdist bdist_wheel twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/packages/db-dtypes/.kokoro/release/common.cfg b/packages/db-dtypes/.kokoro/release/common.cfg index f3e607c295d2..8629d165c484 100644 --- a/packages/db-dtypes/.kokoro/release/common.cfg +++ b/packages/db-dtypes/.kokoro/release/common.cfg @@ -28,7 +28,7 @@ before_action { fetch_keystore { keystore_resource { keystore_config_id: 73713 - keyname: "google-cloud-pypi-token-keystore-1" + keyname: "google-cloud-pypi-token-keystore-2" } } } From 28e0c043102f7210a5be6bfee516ac1c9e58236e Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 20 Sep 2024 23:27:10 +0200 Subject: [PATCH 166/210] chore(deps): update all dependencies (#290) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- packages/db-dtypes/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/db-dtypes/samples/snippets/requirements-test.txt b/packages/db-dtypes/samples/snippets/requirements-test.txt index e56028c28e66..57b712fe7294 100644 --- a/packages/db-dtypes/samples/snippets/requirements-test.txt +++ b/packages/db-dtypes/samples/snippets/requirements-test.txt @@ -1,2 +1,2 @@ pytest===7.4.4; python_version == '3.7' # prevents dependabot from upgrading it -pytest==8.1.1; python_version > '3.7' +pytest==8.3.3; python_version > '3.7' From ab7b48ba85b7d2880e732df832e2b72406c758b7 Mon Sep 17 00:00:00 2001 From: Chalmer Lowe Date: Mon, 7 Oct 2024 15:43:44 -0400 Subject: [PATCH 
167/210] chore: Adds python eol deprecation warning (#292) * chore: Adds python eol deprecation warning * add warnings module * includes sys_micro --- packages/db-dtypes/db_dtypes/__init__.py | 15 +++++++++ .../db-dtypes/db_dtypes/_versions_helpers.py | 32 +++++++++++++++++++ packages/db-dtypes/noxfile.py | 6 ++++ 3 files changed, 53 insertions(+) create mode 100644 packages/db-dtypes/db_dtypes/_versions_helpers.py diff --git a/packages/db-dtypes/db_dtypes/__init__.py b/packages/db-dtypes/db_dtypes/__init__.py index d27e93e1222a..952643b45058 100644 --- a/packages/db-dtypes/db_dtypes/__init__.py +++ b/packages/db-dtypes/db_dtypes/__init__.py @@ -18,6 +18,7 @@ import datetime import re from typing import Optional, Union +import warnings import numpy import packaging.version @@ -29,6 +30,8 @@ from db_dtypes import core from db_dtypes.version import __version__ +from . import _versions_helpers + date_dtype_name = "dbdate" time_dtype_name = "dbtime" @@ -344,6 +347,18 @@ def __sub__(self, other): return super().__sub__(other) +sys_major, sys_minor, sys_micro = _versions_helpers.extract_runtime_version() +if sys_major == 3 and sys_minor in (7, 8): + warnings.warn( + "The python-bigquery library will stop supporting Python 3.7 " + "and Python 3.8 in a future major release expected in Q4 2024. " + f"Your Python version is {sys_major}.{sys_minor}.{sys_micro}. We " + "recommend that you update soon to ensure ongoing support. 
For " + "more details, see: [Google Cloud Client Libraries Supported Python Versions policy](https://cloud.google.com/python/docs/supported-python-versions)", + PendingDeprecationWarning, + ) + + if not JSONArray or not JSONDtype: __all__ = [ "__version__", diff --git a/packages/db-dtypes/db_dtypes/_versions_helpers.py b/packages/db-dtypes/db_dtypes/_versions_helpers.py new file mode 100644 index 000000000000..37247c456d81 --- /dev/null +++ b/packages/db-dtypes/db_dtypes/_versions_helpers.py @@ -0,0 +1,32 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Shared helper functions for verifying versions of installed modules.""" + + +import sys +from typing import Tuple + + +def extract_runtime_version() -> Tuple[int, int, int]: + # Retrieve the version information + version_info = sys.version_info + + # Extract the major, minor, and micro components + major = version_info.major + minor = version_info.minor + micro = version_info.micro + + # Display the version number in a clear format + return major, minor, micro diff --git a/packages/db-dtypes/noxfile.py b/packages/db-dtypes/noxfile.py index 0366bb0802f8..9587e6e7dd7b 100644 --- a/packages/db-dtypes/noxfile.py +++ b/packages/db-dtypes/noxfile.py @@ -173,6 +173,7 @@ def default(session, tests_path): session.run( "py.test", "--quiet", + "-W default::PendingDeprecationWarning", f"--junitxml={os.path.split(tests_path)[-1]}_{session.python}_sponge_log.xml", "--cov=db_dtypes", "--cov=tests/unit", @@ -250,6 +251,7 @@ def prerelease(session, tests_path): session.run( "py.test", "--quiet", + "-W default::PendingDeprecationWarning", f"--junitxml={os.path.split(tests_path)[-1]}_prerelease_{session.python}_sponge_log.xml", "--cov=db_dtypes", "--cov=tests/unit", @@ -345,6 +347,7 @@ def system(session): session.run( "py.test", "--quiet", + "-W default::PendingDeprecationWarning", f"--junitxml=system_{session.python}_sponge_log.xml", system_test_path, *session.posargs, @@ -353,6 +356,7 @@ def system(session): session.run( "py.test", "--quiet", + "-W default::PendingDeprecationWarning", f"--junitxml=system_{session.python}_sponge_log.xml", system_test_folder_path, *session.posargs, @@ -529,6 +533,7 @@ def prerelease_deps(session): session.run( "py.test", "--verbose", + "-W default::PendingDeprecationWarning", f"--junitxml=system_{session.python}_sponge_log.xml", system_test_path, *session.posargs, @@ -537,6 +542,7 @@ def prerelease_deps(session): session.run( "py.test", "--verbose", + "-W default::PendingDeprecationWarning", 
f"--junitxml=system_{session.python}_sponge_log.xml", system_test_folder_path, *session.posargs, From fa7a425bc2a5254fb187c5feebc81739bd459604 Mon Sep 17 00:00:00 2001 From: Chelsea Lin <124939984+chelsea-lin@users.noreply.github.com> Date: Thu, 31 Oct 2024 15:29:05 -0700 Subject: [PATCH 168/210] fix: support dbjson type on pandas version 1.5 (#295) * fix: support JSONDtype on pandas version 1.5 * sets constraints-3.9 for pandas 1.5.3 * fix test cov * fix format * nit * fix lint --- packages/db-dtypes/db_dtypes/json.py | 42 ++++++++++++++----- .../db-dtypes/testing/constraints-3.9.txt | 6 +-- packages/db-dtypes/tests/unit/test_json.py | 16 +------ 3 files changed, 37 insertions(+), 27 deletions(-) diff --git a/packages/db-dtypes/db_dtypes/json.py b/packages/db-dtypes/db_dtypes/json.py index ed04b720919b..a00fe2b238c1 100644 --- a/packages/db-dtypes/db_dtypes/json.py +++ b/packages/db-dtypes/db_dtypes/json.py @@ -72,14 +72,25 @@ class JSONArray(arrays.ArrowExtensionArray): _dtype = JSONDtype() - def __init__(self, values, dtype=None, copy=False) -> None: + def __init__(self, values) -> None: + super().__init__(values) self._dtype = JSONDtype() if isinstance(values, pa.Array): - self._pa_array = pa.chunked_array([values]) + pa_data = pa.chunked_array([values]) elif isinstance(values, pa.ChunkedArray): - self._pa_array = values + pa_data = values else: - raise ValueError(f"Unsupported type '{type(values)}' for JSONArray") + raise NotImplementedError( + f"Unsupported type '{type(values)}' for JSONArray" + ) + + # Ensures compatibility with pandas version 1.5.3 + if hasattr(self, "_data"): + self._data = pa_data + elif hasattr(self, "_pa_array"): + self._pa_array = pa_data + else: + raise NotImplementedError(f"Unsupported pandas version: {pd.__version__}") @classmethod def _box_pa( @@ -111,7 +122,7 @@ def _box_pa_scalar(cls, value) -> pa.Scalar: def _box_pa_array(cls, value, copy: bool = False) -> pa.Array | pa.ChunkedArray: """Box value into a pyarrow Array or 
ChunkedArray.""" if isinstance(value, cls): - pa_array = value._pa_array + pa_array = value.pa_data else: value = [JSONArray._serialize_json(x) for x in value] pa_array = pa.array(value, type=cls._dtype.pyarrow_dtype, from_pandas=True) @@ -147,11 +158,22 @@ def dtype(self) -> JSONDtype: """An instance of JSONDtype""" return self._dtype + @property + def pa_data(self): + """An instance of stored pa data""" + # Ensures compatibility with pandas version 1.5.3 + if hasattr(self, "_data"): + return self._data + elif hasattr(self, "_pa_array"): + return self._pa_array + else: + raise NotImplementedError(f"Unsupported pandas version: {pd.__version__}") + def _cmp_method(self, other, op): if op.__name__ == "eq": - result = pyarrow.compute.equal(self._pa_array, self._box_pa(other)) + result = pyarrow.compute.equal(self.pa_data, self._box_pa(other)) elif op.__name__ == "ne": - result = pyarrow.compute.not_equal(self._pa_array, self._box_pa(other)) + result = pyarrow.compute.not_equal(self.pa_data, self._box_pa(other)) else: # Comparison is not a meaningful one. We don't want to support sorting by JSON columns. raise TypeError(f"{op.__name__} not supported for JSONArray") @@ -169,7 +191,7 @@ def __getitem__(self, item): else: # `check_array_indexer` should verify that the assertion hold true. 
assert item.dtype.kind == "b" - return type(self)(self._pa_array.filter(item)) + return type(self)(self.pa_data.filter(item)) elif isinstance(item, tuple): item = indexers.unpack_tuple_and_ellipses(item) @@ -181,7 +203,7 @@ def __getitem__(self, item): r"(`None`) and integer or boolean arrays are valid indices" ) - value = self._pa_array[item] + value = self.pa_data[item] if isinstance(value, pa.ChunkedArray): return type(self)(value) else: @@ -193,7 +215,7 @@ def __getitem__(self, item): def __iter__(self): """Iterate over elements of the array.""" - for value in self._pa_array: + for value in self.pa_data: val = JSONArray._deserialize_json(value.as_py()) if val is None: yield self._dtype.na_value diff --git a/packages/db-dtypes/testing/constraints-3.9.txt b/packages/db-dtypes/testing/constraints-3.9.txt index b9ab6bf3d530..470082585f6d 100644 --- a/packages/db-dtypes/testing/constraints-3.9.txt +++ b/packages/db-dtypes/testing/constraints-3.9.txt @@ -1,3 +1,3 @@ -# Make sure we test with pandas 1.3.0. The Python version isn't that relevant. -pandas==1.3.0 -numpy<2.0.0 +# Make sure we test with pandas 1.5.0. The Python version isn't that relevant. +pandas==1.5.3 +numpy==1.24.0 \ No newline at end of file diff --git a/packages/db-dtypes/tests/unit/test_json.py b/packages/db-dtypes/tests/unit/test_json.py index c48635d5a1bf..365bd8f68ddd 100644 --- a/packages/db-dtypes/tests/unit/test_json.py +++ b/packages/db-dtypes/tests/unit/test_json.py @@ -13,8 +13,6 @@ # limitations under the License. 
-import json - import pandas as pd import pytest @@ -78,18 +76,8 @@ def test_getitems_when_iter_with_null(): assert pd.isna(result) -def test_to_numpy(): - s = pd.Series(db_dtypes.JSONArray._from_sequence(JSON_DATA.values())) - data = s.to_numpy() - for id, key in enumerate(JSON_DATA.keys()): - if key == "null": - assert pd.isna(data[id]) - else: - assert data[id] == json.dumps(JSON_DATA[key], sort_keys=True) - - def test_deterministic_json_serialization(): x = {"a": 0, "b": 1} y = {"b": 1, "a": 0} - data = db_dtypes.JSONArray._from_sequence([x]) - assert y in data + data = db_dtypes.JSONArray._from_sequence([y]) + assert data[0] == x From f1dade4ad502ae1551658279bf900cda1eb3ca89 Mon Sep 17 00:00:00 2001 From: Chelsea Lin Date: Fri, 8 Nov 2024 05:47:53 -0800 Subject: [PATCH 169/210] fix: dbjson serialization with most compact JSON representation (#299) --- packages/db-dtypes/db_dtypes/json.py | 4 +++- .../tests/compliance/json/test_json_compliance.py | 12 ++++++++---- 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/packages/db-dtypes/db_dtypes/json.py b/packages/db-dtypes/db_dtypes/json.py index a00fe2b238c1..d0bc6ca92a33 100644 --- a/packages/db-dtypes/db_dtypes/json.py +++ b/packages/db-dtypes/db_dtypes/json.py @@ -143,7 +143,9 @@ def _serialize_json(value): else: # `sort_keys=True` sorts dictionary keys before serialization, making # JSON comparisons deterministic. - return json.dumps(value, sort_keys=True) + # `separators=(',', ':')` eliminate whitespace to get the most compact + # JSON representation. 
+ return json.dumps(value, sort_keys=True, separators=(",", ":")) @staticmethod def _deserialize_json(value): diff --git a/packages/db-dtypes/tests/compliance/json/test_json_compliance.py b/packages/db-dtypes/tests/compliance/json/test_json_compliance.py index 18610a0dd660..2a8e69ae338f 100644 --- a/packages/db-dtypes/tests/compliance/json/test_json_compliance.py +++ b/packages/db-dtypes/tests/compliance/json/test_json_compliance.py @@ -31,7 +31,8 @@ def test_astype_str(self, data): # Use `json.dumps(str)` instead of passing `str(obj)` directly to the super method. result = pd.Series(data[:5]).astype(str) expected = pd.Series( - [json.dumps(x, sort_keys=True) for x in data[:5]], dtype=str + [json.dumps(x, sort_keys=True, separators=(",", ":")) for x in data[:5]], + dtype=str, ) tm.assert_series_equal(result, expected) @@ -46,7 +47,7 @@ def test_astype_string(self, data, nullable_string_dtype): # Use `json.dumps(str)` instead of passing `str(obj)` directly to the super method. result = pd.Series(data[:5]).astype(nullable_string_dtype) expected = pd.Series( - [json.dumps(x, sort_keys=True) for x in data[:5]], + [json.dumps(x, sort_keys=True, separators=(",", ":")) for x in data[:5]], dtype=nullable_string_dtype, ) tm.assert_series_equal(result, expected) @@ -119,11 +120,14 @@ class TestJSONArrayInterface(base.BaseInterfaceTests): def test_array_interface(self, data): result = np.array(data) # Use `json.dumps(data[0])` instead of passing `data[0]` directly to the super method. - assert result[0] == json.dumps(data[0]) + assert result[0] == json.dumps(data[0], sort_keys=True, separators=(",", ":")) result = np.array(data, dtype=object) # Use `json.dumps(x)` instead of passing `x` directly to the super method. 
- expected = np.array([json.dumps(x) for x in data], dtype=object) + expected = np.array( + [json.dumps(x, sort_keys=True, separators=(",", ":")) for x in data], + dtype=object, + ) # if expected.ndim > 1: # # nested data, explicitly construct as 1D # expected = construct_1d_object_array_from_listlike(list(data)) From db917ab5c77fc878ad1811ed40cd658480971fc0 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 12 Nov 2024 18:50:43 +0800 Subject: [PATCH 170/210] build: use multiScm for Kokoro release builds (#294) Source-Link: https://github.com/googleapis/synthtool/commit/0da16589204e7f61911f64fcb30ac2d3b6e59b31 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:5cddfe2fb5019bbf78335bc55f15bc13e18354a56b3ff46e1834f8e540807f05 Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 4 +- .../db-dtypes/.github/release-trigger.yml | 1 + .../.kokoro/docker/docs/requirements.txt | 42 +- packages/db-dtypes/.kokoro/docs/common.cfg | 6 +- packages/db-dtypes/.kokoro/release.sh | 2 +- packages/db-dtypes/.kokoro/release/common.cfg | 8 +- packages/db-dtypes/.kokoro/requirements.txt | 610 +++++++++--------- .../.kokoro/samples/python3.13/common.cfg | 40 ++ .../.kokoro/samples/python3.13/continuous.cfg | 6 + .../samples/python3.13/periodic-head.cfg | 11 + .../.kokoro/samples/python3.13/periodic.cfg | 6 + .../.kokoro/samples/python3.13/presubmit.cfg | 6 + .../db-dtypes/.kokoro/test-samples-impl.sh | 3 +- packages/db-dtypes/CONTRIBUTING.rst | 6 +- .../db-dtypes/samples/snippets/noxfile.py | 2 +- 15 files changed, 396 insertions(+), 357 deletions(-) create mode 100644 packages/db-dtypes/.kokoro/samples/python3.13/common.cfg create mode 100644 packages/db-dtypes/.kokoro/samples/python3.13/continuous.cfg create mode 100644 packages/db-dtypes/.kokoro/samples/python3.13/periodic-head.cfg create mode 100644 packages/db-dtypes/.kokoro/samples/python3.13/periodic.cfg create mode 
100644 packages/db-dtypes/.kokoro/samples/python3.13/presubmit.cfg diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 597e0c3261ca..7672b49b6307 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:e8dcfd7cbfd8beac3a3ff8d3f3185287ea0625d859168cc80faccfc9a7a00455 -# created: 2024-09-16T21:04:09.091105552Z + digest: sha256:5cddfe2fb5019bbf78335bc55f15bc13e18354a56b3ff46e1834f8e540807f05 +# created: 2024-10-31T01:41:07.349286254Z diff --git a/packages/db-dtypes/.github/release-trigger.yml b/packages/db-dtypes/.github/release-trigger.yml index d4ca94189e16..4bb79e58eadf 100644 --- a/packages/db-dtypes/.github/release-trigger.yml +++ b/packages/db-dtypes/.github/release-trigger.yml @@ -1 +1,2 @@ enabled: true +multiScmName: diff --git a/packages/db-dtypes/.kokoro/docker/docs/requirements.txt b/packages/db-dtypes/.kokoro/docker/docs/requirements.txt index 7129c7715594..66eacc82f041 100644 --- a/packages/db-dtypes/.kokoro/docker/docs/requirements.txt +++ b/packages/db-dtypes/.kokoro/docker/docs/requirements.txt @@ -4,39 +4,39 @@ # # pip-compile --allow-unsafe --generate-hashes requirements.in # -argcomplete==3.4.0 \ - --hash=sha256:69a79e083a716173e5532e0fa3bef45f793f4e61096cf52b5a42c0211c8b8aa5 \ - --hash=sha256:c2abcdfe1be8ace47ba777d4fce319eb13bf8ad9dace8d085dcad6eded88057f +argcomplete==3.5.1 \ + --hash=sha256:1a1d148bdaa3e3b93454900163403df41448a248af01b6e849edc5ac08e6c363 \ + --hash=sha256:eb1ee355aa2557bd3d0145de7b06b2a45b0ce461e1e7813f5d066039ab4177b4 # via nox colorlog==6.8.2 \ --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33 # via nox -distlib==0.3.8 \ - 
--hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ - --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 +distlib==0.3.9 \ + --hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \ + --hash=sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403 # via virtualenv -filelock==3.15.4 \ - --hash=sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb \ - --hash=sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7 +filelock==3.16.1 \ + --hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \ + --hash=sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435 # via virtualenv -nox==2024.4.15 \ - --hash=sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565 \ - --hash=sha256:ecf6700199cdfa9e5ea0a41ff5e6ef4641d09508eda6edb89d9987864115817f +nox==2024.10.9 \ + --hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \ + --hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95 # via -r requirements.in packaging==24.1 \ --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 # via nox -platformdirs==4.2.2 \ - --hash=sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee \ - --hash=sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3 +platformdirs==4.3.6 \ + --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ + --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb # via virtualenv -tomli==2.0.1 \ - --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ - --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f +tomli==2.0.2 \ + --hash=sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38 \ + 
--hash=sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed # via nox -virtualenv==20.26.3 \ - --hash=sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a \ - --hash=sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589 +virtualenv==20.26.6 \ + --hash=sha256:280aede09a2a5c317e409a00102e7077c6432c5a38f0ef938e643805a7ad2c48 \ + --hash=sha256:7345cc5b25405607a624d8418154577459c3e0277f5466dd79c49d5e492995f2 # via nox diff --git a/packages/db-dtypes/.kokoro/docs/common.cfg b/packages/db-dtypes/.kokoro/docs/common.cfg index c790a9abfc0c..5d4f68dbfa7e 100644 --- a/packages/db-dtypes/.kokoro/docs/common.cfg +++ b/packages/db-dtypes/.kokoro/docs/common.cfg @@ -30,9 +30,9 @@ env_vars: { env_vars: { key: "V2_STAGING_BUCKET" - # Push non-cloud library docs to `docs-staging-v2-staging` instead of the + # Push non-cloud library docs to `docs-staging-v2-dev` instead of the # Cloud RAD bucket `docs-staging-v2` - value: "docs-staging-v2-staging" + value: "docs-staging-v2-dev" } # It will upload the docker image after successful builds. @@ -64,4 +64,4 @@ before_action { keyname: "docuploader_service_account" } } -} \ No newline at end of file +} diff --git a/packages/db-dtypes/.kokoro/release.sh b/packages/db-dtypes/.kokoro/release.sh index ca599fb84db2..6e5aac844acc 100755 --- a/packages/db-dtypes/.kokoro/release.sh +++ b/packages/db-dtypes/.kokoro/release.sh @@ -23,7 +23,7 @@ python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source / export PYTHONUNBUFFERED=1 # Move into the package, build the distribution and upload. 
-TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-2") +TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-3") cd github/python-db-dtypes-pandas python3 setup.py sdist bdist_wheel twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/packages/db-dtypes/.kokoro/release/common.cfg b/packages/db-dtypes/.kokoro/release/common.cfg index 8629d165c484..7b628ca09880 100644 --- a/packages/db-dtypes/.kokoro/release/common.cfg +++ b/packages/db-dtypes/.kokoro/release/common.cfg @@ -28,17 +28,11 @@ before_action { fetch_keystore { keystore_resource { keystore_config_id: 73713 - keyname: "google-cloud-pypi-token-keystore-2" + keyname: "google-cloud-pypi-token-keystore-3" } } } -# Tokens needed to report release status back to GitHub -env_vars: { - key: "SECRET_MANAGER_KEYS" - value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" -} - # Store the packages we uploaded to PyPI. That way, we have a record of exactly # what we published, which we can use to generate SBOMs and attestations. 
action { diff --git a/packages/db-dtypes/.kokoro/requirements.txt b/packages/db-dtypes/.kokoro/requirements.txt index 9622baf0ba38..006d8ef931bf 100644 --- a/packages/db-dtypes/.kokoro/requirements.txt +++ b/packages/db-dtypes/.kokoro/requirements.txt @@ -4,79 +4,94 @@ # # pip-compile --allow-unsafe --generate-hashes requirements.in # -argcomplete==3.4.0 \ - --hash=sha256:69a79e083a716173e5532e0fa3bef45f793f4e61096cf52b5a42c0211c8b8aa5 \ - --hash=sha256:c2abcdfe1be8ace47ba777d4fce319eb13bf8ad9dace8d085dcad6eded88057f +argcomplete==3.5.1 \ + --hash=sha256:1a1d148bdaa3e3b93454900163403df41448a248af01b6e849edc5ac08e6c363 \ + --hash=sha256:eb1ee355aa2557bd3d0145de7b06b2a45b0ce461e1e7813f5d066039ab4177b4 # via nox -attrs==23.2.0 \ - --hash=sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30 \ - --hash=sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1 +attrs==24.2.0 \ + --hash=sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346 \ + --hash=sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2 # via gcp-releasetool backports-tarfile==1.2.0 \ --hash=sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34 \ --hash=sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991 # via jaraco-context -cachetools==5.3.3 \ - --hash=sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945 \ - --hash=sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105 +cachetools==5.5.0 \ + --hash=sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 \ + --hash=sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a # via google-auth -certifi==2024.7.4 \ - --hash=sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b \ - --hash=sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90 +certifi==2024.8.30 \ + --hash=sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8 \ + 
--hash=sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9 # via requests -cffi==1.16.0 \ - --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \ - --hash=sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a \ - --hash=sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417 \ - --hash=sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab \ - --hash=sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520 \ - --hash=sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36 \ - --hash=sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743 \ - --hash=sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8 \ - --hash=sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed \ - --hash=sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684 \ - --hash=sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56 \ - --hash=sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324 \ - --hash=sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d \ - --hash=sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235 \ - --hash=sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e \ - --hash=sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088 \ - --hash=sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000 \ - --hash=sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7 \ - --hash=sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e \ - --hash=sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673 \ - --hash=sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c \ - --hash=sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe \ - 
--hash=sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2 \ - --hash=sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098 \ - --hash=sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8 \ - --hash=sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a \ - --hash=sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0 \ - --hash=sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b \ - --hash=sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896 \ - --hash=sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e \ - --hash=sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9 \ - --hash=sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2 \ - --hash=sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b \ - --hash=sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6 \ - --hash=sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404 \ - --hash=sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f \ - --hash=sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0 \ - --hash=sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4 \ - --hash=sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc \ - --hash=sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936 \ - --hash=sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba \ - --hash=sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872 \ - --hash=sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb \ - --hash=sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614 \ - --hash=sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1 \ - --hash=sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d \ - 
--hash=sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969 \ - --hash=sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b \ - --hash=sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4 \ - --hash=sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627 \ - --hash=sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956 \ - --hash=sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357 +cffi==1.17.1 \ + --hash=sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8 \ + --hash=sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2 \ + --hash=sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1 \ + --hash=sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15 \ + --hash=sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36 \ + --hash=sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824 \ + --hash=sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8 \ + --hash=sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36 \ + --hash=sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17 \ + --hash=sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf \ + --hash=sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc \ + --hash=sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3 \ + --hash=sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed \ + --hash=sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702 \ + --hash=sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1 \ + --hash=sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8 \ + --hash=sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903 \ + 
--hash=sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6 \ + --hash=sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d \ + --hash=sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b \ + --hash=sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e \ + --hash=sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be \ + --hash=sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c \ + --hash=sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683 \ + --hash=sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9 \ + --hash=sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c \ + --hash=sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8 \ + --hash=sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1 \ + --hash=sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4 \ + --hash=sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655 \ + --hash=sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67 \ + --hash=sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595 \ + --hash=sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0 \ + --hash=sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65 \ + --hash=sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41 \ + --hash=sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6 \ + --hash=sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401 \ + --hash=sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6 \ + --hash=sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3 \ + --hash=sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16 \ + --hash=sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93 \ + 
--hash=sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e \ + --hash=sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4 \ + --hash=sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964 \ + --hash=sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c \ + --hash=sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576 \ + --hash=sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0 \ + --hash=sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3 \ + --hash=sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662 \ + --hash=sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3 \ + --hash=sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff \ + --hash=sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5 \ + --hash=sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd \ + --hash=sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f \ + --hash=sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5 \ + --hash=sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14 \ + --hash=sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d \ + --hash=sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9 \ + --hash=sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7 \ + --hash=sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382 \ + --hash=sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a \ + --hash=sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e \ + --hash=sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a \ + --hash=sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4 \ + --hash=sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99 \ + 
--hash=sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87 \ + --hash=sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b # via cryptography charset-normalizer==2.1.1 \ --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ @@ -97,72 +112,67 @@ colorlog==6.8.2 \ # via # gcp-docuploader # nox -cryptography==42.0.8 \ - --hash=sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad \ - --hash=sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583 \ - --hash=sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b \ - --hash=sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c \ - --hash=sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1 \ - --hash=sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648 \ - --hash=sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949 \ - --hash=sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba \ - --hash=sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c \ - --hash=sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9 \ - --hash=sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d \ - --hash=sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c \ - --hash=sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e \ - --hash=sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2 \ - --hash=sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d \ - --hash=sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7 \ - --hash=sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70 \ - --hash=sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2 \ - --hash=sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7 \ - 
--hash=sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14 \ - --hash=sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe \ - --hash=sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e \ - --hash=sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71 \ - --hash=sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961 \ - --hash=sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7 \ - --hash=sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c \ - --hash=sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28 \ - --hash=sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842 \ - --hash=sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902 \ - --hash=sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801 \ - --hash=sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a \ - --hash=sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e +cryptography==43.0.1 \ + --hash=sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494 \ + --hash=sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806 \ + --hash=sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d \ + --hash=sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062 \ + --hash=sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2 \ + --hash=sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4 \ + --hash=sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1 \ + --hash=sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85 \ + --hash=sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84 \ + --hash=sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042 \ + 
--hash=sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d \ + --hash=sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962 \ + --hash=sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2 \ + --hash=sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa \ + --hash=sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d \ + --hash=sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365 \ + --hash=sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96 \ + --hash=sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47 \ + --hash=sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d \ + --hash=sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d \ + --hash=sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c \ + --hash=sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb \ + --hash=sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277 \ + --hash=sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172 \ + --hash=sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034 \ + --hash=sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a \ + --hash=sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289 # via # -r requirements.in # gcp-releasetool # secretstorage -distlib==0.3.8 \ - --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ - --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 +distlib==0.3.9 \ + --hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \ + --hash=sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403 # via virtualenv docutils==0.21.2 \ --hash=sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f \ 
--hash=sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2 # via readme-renderer -filelock==3.15.4 \ - --hash=sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb \ - --hash=sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7 +filelock==3.16.1 \ + --hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \ + --hash=sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435 # via virtualenv gcp-docuploader==0.6.5 \ --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea # via -r requirements.in -gcp-releasetool==2.0.1 \ - --hash=sha256:34314a910c08e8911d9c965bd44f8f2185c4f556e737d719c33a41f6a610de96 \ - --hash=sha256:b0d5863c6a070702b10883d37c4bdfd74bf930fe417f36c0c965d3b7c779ae62 +gcp-releasetool==2.1.1 \ + --hash=sha256:25639269f4eae510094f9dbed9894977e1966933211eb155a451deebc3fc0b30 \ + --hash=sha256:845f4ded3d9bfe8cc7fdaad789e83f4ea014affa77785259a7ddac4b243e099e # via -r requirements.in -google-api-core==2.19.1 \ - --hash=sha256:f12a9b8309b5e21d92483bbd47ce2c445861ec7d269ef6784ecc0ea8c1fa6125 \ - --hash=sha256:f4695f1e3650b316a795108a76a1c416e6afb036199d1c1f1f110916df479ffd +google-api-core==2.21.0 \ + --hash=sha256:4a152fd11a9f774ea606388d423b68aa7e6d6a0ffe4c8266f74979613ec09f81 \ + --hash=sha256:6869eacb2a37720380ba5898312af79a4d30b8bca1548fb4093e0697dc4bdf5d # via # google-cloud-core # google-cloud-storage -google-auth==2.31.0 \ - --hash=sha256:042c4702efa9f7d3c48d3a69341c209381b125faa6dbf3ebe56bc7e40ae05c23 \ - --hash=sha256:87805c36970047247c8afe614d4e3af8eceafc1ebba0c679fe75ddd1d575e871 +google-auth==2.35.0 \ + --hash=sha256:25df55f327ef021de8be50bad0dfd4a916ad0de96da86cd05661c9297723ad3f \ + --hash=sha256:f4c64ed4e01e8e8b646ef34c018f8bf3338df0c8e37d8b3bba40e7f574a3278a # via # gcp-releasetool # google-api-core @@ -172,97 +182,56 @@ google-cloud-core==2.4.1 \ 
--hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \ --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61 # via google-cloud-storage -google-cloud-storage==2.17.0 \ - --hash=sha256:49378abff54ef656b52dca5ef0f2eba9aa83dc2b2c72c78714b03a1a95fe9388 \ - --hash=sha256:5b393bc766b7a3bc6f5407b9e665b2450d36282614b7945e570b3480a456d1e1 +google-cloud-storage==2.18.2 \ + --hash=sha256:97a4d45c368b7d401ed48c4fdfe86e1e1cb96401c9e199e419d289e2c0370166 \ + --hash=sha256:aaf7acd70cdad9f274d29332673fcab98708d0e1f4dceb5a5356aaef06af4d99 # via gcp-docuploader -google-crc32c==1.5.0 \ - --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ - --hash=sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876 \ - --hash=sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c \ - --hash=sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289 \ - --hash=sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298 \ - --hash=sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02 \ - --hash=sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f \ - --hash=sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2 \ - --hash=sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a \ - --hash=sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb \ - --hash=sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210 \ - --hash=sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5 \ - --hash=sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee \ - --hash=sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c \ - --hash=sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a \ - --hash=sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314 \ - 
--hash=sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd \ - --hash=sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65 \ - --hash=sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37 \ - --hash=sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4 \ - --hash=sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13 \ - --hash=sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894 \ - --hash=sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31 \ - --hash=sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e \ - --hash=sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709 \ - --hash=sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740 \ - --hash=sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc \ - --hash=sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d \ - --hash=sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c \ - --hash=sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c \ - --hash=sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d \ - --hash=sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906 \ - --hash=sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61 \ - --hash=sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57 \ - --hash=sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c \ - --hash=sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a \ - --hash=sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438 \ - --hash=sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946 \ - --hash=sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7 \ - --hash=sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96 \ - 
--hash=sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091 \ - --hash=sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae \ - --hash=sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d \ - --hash=sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88 \ - --hash=sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2 \ - --hash=sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd \ - --hash=sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541 \ - --hash=sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728 \ - --hash=sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178 \ - --hash=sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968 \ - --hash=sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346 \ - --hash=sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8 \ - --hash=sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93 \ - --hash=sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7 \ - --hash=sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273 \ - --hash=sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462 \ - --hash=sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94 \ - --hash=sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd \ - --hash=sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e \ - --hash=sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57 \ - --hash=sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b \ - --hash=sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9 \ - --hash=sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a \ - --hash=sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100 \ - 
--hash=sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325 \ - --hash=sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183 \ - --hash=sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556 \ - --hash=sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4 +google-crc32c==1.6.0 \ + --hash=sha256:05e2d8c9a2f853ff116db9706b4a27350587f341eda835f46db3c0a8c8ce2f24 \ + --hash=sha256:18e311c64008f1f1379158158bb3f0c8d72635b9eb4f9545f8cf990c5668e59d \ + --hash=sha256:236c87a46cdf06384f614e9092b82c05f81bd34b80248021f729396a78e55d7e \ + --hash=sha256:35834855408429cecf495cac67ccbab802de269e948e27478b1e47dfb6465e57 \ + --hash=sha256:386122eeaaa76951a8196310432c5b0ef3b53590ef4c317ec7588ec554fec5d2 \ + --hash=sha256:40b05ab32a5067525670880eb5d169529089a26fe35dce8891127aeddc1950e8 \ + --hash=sha256:48abd62ca76a2cbe034542ed1b6aee851b6f28aaca4e6551b5599b6f3ef175cc \ + --hash=sha256:50cf2a96da226dcbff8671233ecf37bf6e95de98b2a2ebadbfdf455e6d05df42 \ + --hash=sha256:51c4f54dd8c6dfeb58d1df5e4f7f97df8abf17a36626a217f169893d1d7f3e9f \ + --hash=sha256:5bcc90b34df28a4b38653c36bb5ada35671ad105c99cfe915fb5bed7ad6924aa \ + --hash=sha256:62f6d4a29fea082ac4a3c9be5e415218255cf11684ac6ef5488eea0c9132689b \ + --hash=sha256:6eceb6ad197656a1ff49ebfbbfa870678c75be4344feb35ac1edf694309413dc \ + --hash=sha256:7aec8e88a3583515f9e0957fe4f5f6d8d4997e36d0f61624e70469771584c760 \ + --hash=sha256:91ca8145b060679ec9176e6de4f89b07363d6805bd4760631ef254905503598d \ + --hash=sha256:a184243544811e4a50d345838a883733461e67578959ac59964e43cca2c791e7 \ + --hash=sha256:a9e4b426c3702f3cd23b933436487eb34e01e00327fac20c9aebb68ccf34117d \ + --hash=sha256:bb0966e1c50d0ef5bc743312cc730b533491d60585a9a08f897274e57c3f70e0 \ + --hash=sha256:bb8b3c75bd157010459b15222c3fd30577042a7060e29d42dabce449c087f2b3 \ + --hash=sha256:bd5e7d2445d1a958c266bfa5d04c39932dc54093fa391736dbfdb0f1929c1fb3 \ + 
--hash=sha256:c87d98c7c4a69066fd31701c4e10d178a648c2cac3452e62c6b24dc51f9fcc00 \ + --hash=sha256:d2952396dc604544ea7476b33fe87faedc24d666fb0c2d5ac971a2b9576ab871 \ + --hash=sha256:d8797406499f28b5ef791f339594b0b5fdedf54e203b5066675c406ba69d705c \ + --hash=sha256:d9e9913f7bd69e093b81da4535ce27af842e7bf371cde42d1ae9e9bd382dc0e9 \ + --hash=sha256:e2806553238cd076f0a55bddab37a532b53580e699ed8e5606d0de1f856b5205 \ + --hash=sha256:ebab974b1687509e5c973b5c4b8b146683e101e102e17a86bd196ecaa4d099fc \ + --hash=sha256:ed767bf4ba90104c1216b68111613f0d5926fb3780660ea1198fc469af410e9d \ + --hash=sha256:f7a1fc29803712f80879b0806cb83ab24ce62fc8daf0569f2204a0cfd7f68ed4 # via # google-cloud-storage # google-resumable-media -google-resumable-media==2.7.1 \ - --hash=sha256:103ebc4ba331ab1bfdac0250f8033627a2cd7cde09e7ccff9181e31ba4315b2c \ - --hash=sha256:eae451a7b2e2cdbaaa0fd2eb00cc8a1ee5e95e16b55597359cbc3d27d7d90e33 +google-resumable-media==2.7.2 \ + --hash=sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa \ + --hash=sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0 # via google-cloud-storage -googleapis-common-protos==1.63.2 \ - --hash=sha256:27a2499c7e8aff199665b22741997e485eccc8645aa9176c7c988e6fae507945 \ - --hash=sha256:27c5abdffc4911f28101e635de1533fb4cfd2c37fbaa9174587c799fac90aa87 +googleapis-common-protos==1.65.0 \ + --hash=sha256:2972e6c496f435b92590fd54045060867f3fe9be2c82ab148fc8885035479a63 \ + --hash=sha256:334a29d07cddc3aa01dee4988f9afd9b2916ee2ff49d6b757155dc0d197852c0 # via google-api-core -idna==3.7 \ - --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \ - --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0 +idna==3.10 \ + --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ + --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 # via requests -importlib-metadata==8.0.0 \ - 
--hash=sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f \ - --hash=sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812 +importlib-metadata==8.5.0 \ + --hash=sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b \ + --hash=sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7 # via # -r requirements.in # keyring @@ -271,13 +240,13 @@ jaraco-classes==3.4.0 \ --hash=sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd \ --hash=sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790 # via keyring -jaraco-context==5.3.0 \ - --hash=sha256:3e16388f7da43d384a1a7cd3452e72e14732ac9fe459678773a3608a812bf266 \ - --hash=sha256:c2f67165ce1f9be20f32f650f25d8edfc1646a8aeee48ae06fb35f90763576d2 +jaraco-context==6.0.1 \ + --hash=sha256:9bae4ea555cf0b14938dc0aee7c9f32ed303aa20a3b73e7dc80111628792d1b3 \ + --hash=sha256:f797fc481b490edb305122c9181830a3a5b76d84ef6d1aef2fb9b47ab956f9e4 # via keyring -jaraco-functools==4.0.1 \ - --hash=sha256:3b24ccb921d6b593bdceb56ce14799204f473976e2a9d4b15b04d0f2c2326664 \ - --hash=sha256:d33fa765374c0611b52f8b3a795f8900869aa88c84769d4d1746cd68fb28c3e8 +jaraco-functools==4.1.0 \ + --hash=sha256:70f7e0e2ae076498e212562325e805204fc092d7b4c17e0e86c959e249701a9d \ + --hash=sha256:ad159f13428bc4acbf5541ad6dec511f91573b90fba04df61dafa2a1231cf649 # via keyring jeepney==0.8.0 \ --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ @@ -289,9 +258,9 @@ jinja2==3.1.4 \ --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \ --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d # via gcp-releasetool -keyring==25.2.1 \ - --hash=sha256:2458681cdefc0dbc0b7eb6cf75d0b98e59f9ad9b2d4edd319d18f68bdca95e50 \ - --hash=sha256:daaffd42dbda25ddafb1ad5fec4024e5bbcfe424597ca1ca452b299861e49f1b +keyring==25.4.1 \ + --hash=sha256:5426f817cf7f6f007ba5ec722b1bcad95a75b27d780343772ad76b17cb47b0bf 
\ + --hash=sha256:b07ebc55f3e8ed86ac81dd31ef14e81ace9dd9c3d4b5d77a6e9a2016d0d71a1b # via # gcp-releasetool # twine @@ -299,75 +268,76 @@ markdown-it-py==3.0.0 \ --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \ --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb # via rich -markupsafe==2.1.5 \ - --hash=sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf \ - --hash=sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff \ - --hash=sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f \ - --hash=sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3 \ - --hash=sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532 \ - --hash=sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f \ - --hash=sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617 \ - --hash=sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df \ - --hash=sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4 \ - --hash=sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906 \ - --hash=sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f \ - --hash=sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4 \ - --hash=sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8 \ - --hash=sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371 \ - --hash=sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2 \ - --hash=sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465 \ - --hash=sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52 \ - --hash=sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6 \ - --hash=sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169 \ - 
--hash=sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad \ - --hash=sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2 \ - --hash=sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0 \ - --hash=sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029 \ - --hash=sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f \ - --hash=sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a \ - --hash=sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced \ - --hash=sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5 \ - --hash=sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c \ - --hash=sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf \ - --hash=sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9 \ - --hash=sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb \ - --hash=sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad \ - --hash=sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3 \ - --hash=sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1 \ - --hash=sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46 \ - --hash=sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc \ - --hash=sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a \ - --hash=sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee \ - --hash=sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900 \ - --hash=sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5 \ - --hash=sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea \ - --hash=sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f \ - --hash=sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5 \ - 
--hash=sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e \ - --hash=sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a \ - --hash=sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f \ - --hash=sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50 \ - --hash=sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a \ - --hash=sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b \ - --hash=sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4 \ - --hash=sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff \ - --hash=sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2 \ - --hash=sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46 \ - --hash=sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b \ - --hash=sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf \ - --hash=sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5 \ - --hash=sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5 \ - --hash=sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab \ - --hash=sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd \ - --hash=sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68 +markupsafe==3.0.1 \ + --hash=sha256:0778de17cff1acaeccc3ff30cd99a3fd5c50fc58ad3d6c0e0c4c58092b859396 \ + --hash=sha256:0f84af7e813784feb4d5e4ff7db633aba6c8ca64a833f61d8e4eade234ef0c38 \ + --hash=sha256:17b2aea42a7280db02ac644db1d634ad47dcc96faf38ab304fe26ba2680d359a \ + --hash=sha256:242d6860f1fd9191aef5fae22b51c5c19767f93fb9ead4d21924e0bcb17619d8 \ + --hash=sha256:244dbe463d5fb6d7ce161301a03a6fe744dac9072328ba9fc82289238582697b \ + --hash=sha256:26627785a54a947f6d7336ce5963569b5d75614619e75193bdb4e06e21d447ad \ + 
--hash=sha256:2a4b34a8d14649315c4bc26bbfa352663eb51d146e35eef231dd739d54a5430a \ + --hash=sha256:2ae99f31f47d849758a687102afdd05bd3d3ff7dbab0a8f1587981b58a76152a \ + --hash=sha256:312387403cd40699ab91d50735ea7a507b788091c416dd007eac54434aee51da \ + --hash=sha256:3341c043c37d78cc5ae6e3e305e988532b072329639007fd408a476642a89fd6 \ + --hash=sha256:33d1c36b90e570ba7785dacd1faaf091203d9942bc036118fab8110a401eb1a8 \ + --hash=sha256:3e683ee4f5d0fa2dde4db77ed8dd8a876686e3fc417655c2ece9a90576905344 \ + --hash=sha256:3ffb4a8e7d46ed96ae48805746755fadd0909fea2306f93d5d8233ba23dda12a \ + --hash=sha256:40621d60d0e58aa573b68ac5e2d6b20d44392878e0bfc159012a5787c4e35bc8 \ + --hash=sha256:40f1e10d51c92859765522cbd79c5c8989f40f0419614bcdc5015e7b6bf97fc5 \ + --hash=sha256:45d42d132cff577c92bfba536aefcfea7e26efb975bd455db4e6602f5c9f45e7 \ + --hash=sha256:48488d999ed50ba8d38c581d67e496f955821dc183883550a6fbc7f1aefdc170 \ + --hash=sha256:4935dd7883f1d50e2ffecca0aa33dc1946a94c8f3fdafb8df5c330e48f71b132 \ + --hash=sha256:4c2d64fdba74ad16138300815cfdc6ab2f4647e23ced81f59e940d7d4a1469d9 \ + --hash=sha256:4c8817557d0de9349109acb38b9dd570b03cc5014e8aabf1cbddc6e81005becd \ + --hash=sha256:4ffaaac913c3f7345579db4f33b0020db693f302ca5137f106060316761beea9 \ + --hash=sha256:5a4cb365cb49b750bdb60b846b0c0bc49ed62e59a76635095a179d440540c346 \ + --hash=sha256:62fada2c942702ef8952754abfc1a9f7658a4d5460fabe95ac7ec2cbe0d02abc \ + --hash=sha256:67c519635a4f64e495c50e3107d9b4075aec33634272b5db1cde839e07367589 \ + --hash=sha256:6a54c43d3ec4cf2a39f4387ad044221c66a376e58c0d0e971d47c475ba79c6b5 \ + --hash=sha256:7044312a928a66a4c2a22644147bc61a199c1709712069a344a3fb5cfcf16915 \ + --hash=sha256:730d86af59e0e43ce277bb83970530dd223bf7f2a838e086b50affa6ec5f9295 \ + --hash=sha256:800100d45176652ded796134277ecb13640c1a537cad3b8b53da45aa96330453 \ + --hash=sha256:80fcbf3add8790caddfab6764bde258b5d09aefbe9169c183f88a7410f0f6dea \ + --hash=sha256:82b5dba6eb1bcc29cc305a18a3c5365d2af06ee71b123216416f7e20d2a84e5b \ + 
--hash=sha256:852dc840f6d7c985603e60b5deaae1d89c56cb038b577f6b5b8c808c97580f1d \ + --hash=sha256:8ad4ad1429cd4f315f32ef263c1342166695fad76c100c5d979c45d5570ed58b \ + --hash=sha256:8ae369e84466aa70f3154ee23c1451fda10a8ee1b63923ce76667e3077f2b0c4 \ + --hash=sha256:93e8248d650e7e9d49e8251f883eed60ecbc0e8ffd6349e18550925e31bd029b \ + --hash=sha256:973a371a55ce9ed333a3a0f8e0bcfae9e0d637711534bcb11e130af2ab9334e7 \ + --hash=sha256:9ba25a71ebf05b9bb0e2ae99f8bc08a07ee8e98c612175087112656ca0f5c8bf \ + --hash=sha256:a10860e00ded1dd0a65b83e717af28845bb7bd16d8ace40fe5531491de76b79f \ + --hash=sha256:a4792d3b3a6dfafefdf8e937f14906a51bd27025a36f4b188728a73382231d91 \ + --hash=sha256:a7420ceda262dbb4b8d839a4ec63d61c261e4e77677ed7c66c99f4e7cb5030dd \ + --hash=sha256:ad91738f14eb8da0ff82f2acd0098b6257621410dcbd4df20aaa5b4233d75a50 \ + --hash=sha256:b6a387d61fe41cdf7ea95b38e9af11cfb1a63499af2759444b99185c4ab33f5b \ + --hash=sha256:b954093679d5750495725ea6f88409946d69cfb25ea7b4c846eef5044194f583 \ + --hash=sha256:bbde71a705f8e9e4c3e9e33db69341d040c827c7afa6789b14c6e16776074f5a \ + --hash=sha256:beeebf760a9c1f4c07ef6a53465e8cfa776ea6a2021eda0d0417ec41043fe984 \ + --hash=sha256:c91b394f7601438ff79a4b93d16be92f216adb57d813a78be4446fe0f6bc2d8c \ + --hash=sha256:c97ff7fedf56d86bae92fa0a646ce1a0ec7509a7578e1ed238731ba13aabcd1c \ + --hash=sha256:cb53e2a99df28eee3b5f4fea166020d3ef9116fdc5764bc5117486e6d1211b25 \ + --hash=sha256:cbf445eb5628981a80f54087f9acdbf84f9b7d862756110d172993b9a5ae81aa \ + --hash=sha256:d06b24c686a34c86c8c1fba923181eae6b10565e4d80bdd7bc1c8e2f11247aa4 \ + --hash=sha256:d98e66a24497637dd31ccab090b34392dddb1f2f811c4b4cd80c230205c074a3 \ + --hash=sha256:db15ce28e1e127a0013dfb8ac243a8e392db8c61eae113337536edb28bdc1f97 \ + --hash=sha256:db842712984e91707437461930e6011e60b39136c7331e971952bb30465bc1a1 \ + --hash=sha256:e24bfe89c6ac4c31792793ad9f861b8f6dc4546ac6dc8f1c9083c7c4f2b335cd \ + --hash=sha256:e81c52638315ff4ac1b533d427f50bc0afc746deb949210bc85f05d4f15fd772 \ + 
--hash=sha256:e9393357f19954248b00bed7c56f29a25c930593a77630c719653d51e7669c2a \ + --hash=sha256:ee3941769bd2522fe39222206f6dd97ae83c442a94c90f2b7a25d847d40f4729 \ + --hash=sha256:f31ae06f1328595d762c9a2bf29dafd8621c7d3adc130cbb46278079758779ca \ + --hash=sha256:f94190df587738280d544971500b9cafc9b950d32efcb1fba9ac10d84e6aa4e6 \ + --hash=sha256:fa7d686ed9883f3d664d39d5a8e74d3c5f63e603c2e3ff0abcba23eac6542635 \ + --hash=sha256:fb532dd9900381d2e8f48172ddc5a59db4c445a11b9fab40b3b786da40d3b56b \ + --hash=sha256:fe32482b37b4b00c7a52a07211b479653b7fe4f22b2e481b9a9b099d8a430f2f # via jinja2 mdurl==0.1.2 \ --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba # via markdown-it-py -more-itertools==10.3.0 \ - --hash=sha256:e5d93ef411224fbcef366a6e8ddc4c5781bc6359d43412a65dd5964e46111463 \ - --hash=sha256:ea6a02e24a9161e51faad17a8782b92a0df82c12c1c8886fec7f0c3fa1a1b320 +more-itertools==10.5.0 \ + --hash=sha256:037b0d3203ce90cca8ab1defbbdac29d5f993fc20131f3664dc8d6acfa872aef \ + --hash=sha256:5482bfef7849c25dc3c6dd53a6173ae4795da2a41a80faea6700d9f5846c5da6 # via # jaraco-classes # jaraco-functools @@ -389,9 +359,9 @@ nh3==0.2.18 \ --hash=sha256:de3ceed6e661954871d6cd78b410213bdcb136f79aafe22aa7182e028b8c7307 \ --hash=sha256:f0eca9ca8628dbb4e916ae2491d72957fdd35f7a5d326b7032a345f111ac07fe # via readme-renderer -nox==2024.4.15 \ - --hash=sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565 \ - --hash=sha256:ecf6700199cdfa9e5ea0a41ff5e6ef4641d09508eda6edb89d9987864115817f +nox==2024.10.9 \ + --hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \ + --hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95 # via -r requirements.in packaging==24.1 \ --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ @@ -403,41 +373,41 @@ pkginfo==1.10.0 \ 
--hash=sha256:5df73835398d10db79f8eecd5cd86b1f6d29317589ea70796994d49399af6297 \ --hash=sha256:889a6da2ed7ffc58ab5b900d888ddce90bce912f2d2de1dc1c26f4cb9fe65097 # via twine -platformdirs==4.2.2 \ - --hash=sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee \ - --hash=sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3 +platformdirs==4.3.6 \ + --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ + --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb # via virtualenv proto-plus==1.24.0 \ --hash=sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445 \ --hash=sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12 # via google-api-core -protobuf==5.27.2 \ - --hash=sha256:0e341109c609749d501986b835f667c6e1e24531096cff9d34ae411595e26505 \ - --hash=sha256:176c12b1f1c880bf7a76d9f7c75822b6a2bc3db2d28baa4d300e8ce4cde7409b \ - --hash=sha256:354d84fac2b0d76062e9b3221f4abbbacdfd2a4d8af36bab0474f3a0bb30ab38 \ - --hash=sha256:4fadd8d83e1992eed0248bc50a4a6361dc31bcccc84388c54c86e530b7f58863 \ - --hash=sha256:54330f07e4949d09614707c48b06d1a22f8ffb5763c159efd5c0928326a91470 \ - --hash=sha256:610e700f02469c4a997e58e328cac6f305f649826853813177e6290416e846c6 \ - --hash=sha256:7fc3add9e6003e026da5fc9e59b131b8f22b428b991ccd53e2af8071687b4fce \ - --hash=sha256:9e8f199bf7f97bd7ecebffcae45ebf9527603549b2b562df0fbc6d4d688f14ca \ - --hash=sha256:a109916aaac42bff84702fb5187f3edadbc7c97fc2c99c5ff81dd15dcce0d1e5 \ - --hash=sha256:b848dbe1d57ed7c191dfc4ea64b8b004a3f9ece4bf4d0d80a367b76df20bf36e \ - --hash=sha256:f3ecdef226b9af856075f28227ff2c90ce3a594d092c39bee5513573f25e2714 +protobuf==5.28.2 \ + --hash=sha256:2c69461a7fcc8e24be697624c09a839976d82ae75062b11a0972e41fd2cd9132 \ + --hash=sha256:35cfcb15f213449af7ff6198d6eb5f739c37d7e4f1c09b5d0641babf2cc0c68f \ + --hash=sha256:52235802093bd8a2811abbe8bf0ab9c5f54cca0a751fdd3f6ac2a21438bffece \ + 
--hash=sha256:59379674ff119717404f7454647913787034f03fe7049cbef1d74a97bb4593f0 \ + --hash=sha256:5e8a95246d581eef20471b5d5ba010d55f66740942b95ba9b872d918c459452f \ + --hash=sha256:87317e9bcda04a32f2ee82089a204d3a2f0d3c8aeed16568c7daf4756e4f1fe0 \ + --hash=sha256:8ddc60bf374785fb7cb12510b267f59067fa10087325b8e1855b898a0d81d276 \ + --hash=sha256:a8b9403fc70764b08d2f593ce44f1d2920c5077bf7d311fefec999f8c40f78b7 \ + --hash=sha256:c0ea0123dac3399a2eeb1a1443d82b7afc9ff40241433296769f7da42d142ec3 \ + --hash=sha256:ca53faf29896c526863366a52a8f4d88e69cd04ec9571ed6082fa117fac3ab36 \ + --hash=sha256:eeea10f3dc0ac7e6b4933d32db20662902b4ab81bf28df12218aa389e9c2102d # via # gcp-docuploader # gcp-releasetool # google-api-core # googleapis-common-protos # proto-plus -pyasn1==0.6.0 \ - --hash=sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c \ - --hash=sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473 +pyasn1==0.6.1 \ + --hash=sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629 \ + --hash=sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034 # via # pyasn1-modules # rsa -pyasn1-modules==0.4.0 \ - --hash=sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6 \ - --hash=sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b +pyasn1-modules==0.4.1 \ + --hash=sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd \ + --hash=sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c # via google-auth pycparser==2.22 \ --hash=sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6 \ @@ -449,9 +419,9 @@ pygments==2.18.0 \ # via # readme-renderer # rich -pyjwt==2.8.0 \ - --hash=sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de \ - --hash=sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320 +pyjwt==2.9.0 \ + --hash=sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850 \ + 
--hash=sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c # via gcp-releasetool pyperclip==1.9.0 \ --hash=sha256:b7de0142ddc81bfc5c7507eea19da920b92252b548b96186caf94a5e2527d310 @@ -481,9 +451,9 @@ rfc3986==2.0.0 \ --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c # via twine -rich==13.7.1 \ - --hash=sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222 \ - --hash=sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432 +rich==13.9.2 \ + --hash=sha256:51a2c62057461aaf7152b4d611168f93a9fc73068f8ded2790f29fe2b5366d0c \ + --hash=sha256:8c82a3d3f8dcfe9e734771313e606b39d8247bb6b826e196f4914b333b743cf1 # via twine rsa==4.9 \ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ @@ -499,9 +469,9 @@ six==1.16.0 \ # via # gcp-docuploader # python-dateutil -tomli==2.0.1 \ - --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ - --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f +tomli==2.0.2 \ + --hash=sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38 \ + --hash=sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed # via nox twine==5.1.1 \ --hash=sha256:215dbe7b4b94c2c50a7315c0275d2258399280fbb7d04182c7e55e24b5f93997 \ @@ -510,28 +480,30 @@ twine==5.1.1 \ typing-extensions==4.12.2 \ --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 - # via -r requirements.in -urllib3==2.2.2 \ - --hash=sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472 \ - --hash=sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168 + # via + # -r requirements.in + # rich +urllib3==2.2.3 \ + --hash=sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac \ + 
--hash=sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9 # via # requests # twine -virtualenv==20.26.3 \ - --hash=sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a \ - --hash=sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589 +virtualenv==20.26.6 \ + --hash=sha256:280aede09a2a5c317e409a00102e7077c6432c5a38f0ef938e643805a7ad2c48 \ + --hash=sha256:7345cc5b25405607a624d8418154577459c3e0277f5466dd79c49d5e492995f2 # via nox -wheel==0.43.0 \ - --hash=sha256:465ef92c69fa5c5da2d1cf8ac40559a8c940886afcef87dcf14b9470862f1d85 \ - --hash=sha256:55c570405f142630c6b9f72fe09d9b67cf1477fcf543ae5b8dcb1f5b7377da81 +wheel==0.44.0 \ + --hash=sha256:2376a90c98cc337d18623527a97c31797bd02bad0033d41547043a1cbfbe448f \ + --hash=sha256:a29c3f2817e95ab89aa4660681ad547c0e9547f20e75b0562fe7723c9a2a9d49 # via -r requirements.in -zipp==3.19.2 \ - --hash=sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19 \ - --hash=sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c +zipp==3.20.2 \ + --hash=sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350 \ + --hash=sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29 # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==70.2.0 \ - --hash=sha256:b8b8060bb426838fbe942479c90296ce976249451118ef566a5a0b7d8b78fb05 \ - --hash=sha256:bd63e505105011b25c3c11f753f7e3b8465ea739efddaccef8f0efac2137bac1 +setuptools==75.1.0 \ + --hash=sha256:35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2 \ + --hash=sha256:d59a21b17a275fb872a9c3dae73963160ae079f1049ed956880cd7c09b120538 # via -r requirements.in diff --git a/packages/db-dtypes/.kokoro/samples/python3.13/common.cfg b/packages/db-dtypes/.kokoro/samples/python3.13/common.cfg new file mode 100644 index 000000000000..d81f6eed8552 --- /dev/null +++ b/packages/db-dtypes/.kokoro/samples/python3.13/common.cfg @@ 
-0,0 +1,40 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.13" +} + +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-313" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-db-dtypes-pandas/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "python-db-dtypes-pandas/.kokoro/trampoline_v2.sh" diff --git a/packages/db-dtypes/.kokoro/samples/python3.13/continuous.cfg b/packages/db-dtypes/.kokoro/samples/python3.13/continuous.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/db-dtypes/.kokoro/samples/python3.13/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/python3.13/periodic-head.cfg b/packages/db-dtypes/.kokoro/samples/python3.13/periodic-head.cfg new file mode 100644 index 000000000000..ee3d56408db9 --- /dev/null +++ b/packages/db-dtypes/.kokoro/samples/python3.13/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: 
"github/python-db-dtypes-pandas/.kokoro/test-samples-against-head.sh" +} diff --git a/packages/db-dtypes/.kokoro/samples/python3.13/periodic.cfg b/packages/db-dtypes/.kokoro/samples/python3.13/periodic.cfg new file mode 100644 index 000000000000..71cd1e597e38 --- /dev/null +++ b/packages/db-dtypes/.kokoro/samples/python3.13/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} diff --git a/packages/db-dtypes/.kokoro/samples/python3.13/presubmit.cfg b/packages/db-dtypes/.kokoro/samples/python3.13/presubmit.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/db-dtypes/.kokoro/samples/python3.13/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/test-samples-impl.sh b/packages/db-dtypes/.kokoro/test-samples-impl.sh index 55910c8ba178..53e365bc4e79 100755 --- a/packages/db-dtypes/.kokoro/test-samples-impl.sh +++ b/packages/db-dtypes/.kokoro/test-samples-impl.sh @@ -33,7 +33,8 @@ export PYTHONUNBUFFERED=1 env | grep KOKORO # Install nox -python3.9 -m pip install --upgrade --quiet nox +# `virtualenv==20.26.6` is added for Python 3.7 compatibility +python3.9 -m pip install --upgrade --quiet nox virtualenv==20.26.6 # Use secrets acessor service account to get secrets if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then diff --git a/packages/db-dtypes/CONTRIBUTING.rst b/packages/db-dtypes/CONTRIBUTING.rst index 951fc7a42f85..0bda74ace1e4 100644 --- a/packages/db-dtypes/CONTRIBUTING.rst +++ b/packages/db-dtypes/CONTRIBUTING.rst @@ -22,7 +22,7 @@ In order to add a feature: documentation. - The feature must work fully on the following CPython versions: - 3.7, 3.8, 3.9, 3.10, 3.11 and 3.12 on both UNIX and Windows. 
+ 3.7, 3.8, 3.9, 3.10, 3.11, 3.12 and 3.13 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -72,7 +72,7 @@ We use `nox `__ to instrument our tests. - To run a single unit test:: - $ nox -s unit-3.12 -- -k + $ nox -s unit-3.13 -- -k .. note:: @@ -227,6 +227,7 @@ We support: - `Python 3.10`_ - `Python 3.11`_ - `Python 3.12`_ +- `Python 3.13`_ .. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ @@ -234,6 +235,7 @@ We support: .. _Python 3.10: https://docs.python.org/3.10/ .. _Python 3.11: https://docs.python.org/3.11/ .. _Python 3.12: https://docs.python.org/3.12/ +.. _Python 3.13: https://docs.python.org/3.13/ Supported versions can be found in our ``noxfile.py`` `config`_. diff --git a/packages/db-dtypes/samples/snippets/noxfile.py b/packages/db-dtypes/samples/snippets/noxfile.py index 3b7135946fd5..c9a3d1ecbf2a 100644 --- a/packages/db-dtypes/samples/snippets/noxfile.py +++ b/packages/db-dtypes/samples/snippets/noxfile.py @@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] # Any default versions that should be ignored. 
IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] From f80585a50d5870fbdc731b5869c72745f3468fcb Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 12 Nov 2024 18:59:33 +0800 Subject: [PATCH 171/210] chore(python): remove obsolete release scripts and config files (#300) Source-Link: https://github.com/googleapis/synthtool/commit/635751753776b1a7cabd4dcaa48013a96274372d Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:91d0075c6f2fd6a073a06168feee19fa2a8507692f2519a1dc7de3366d157e99 Co-authored-by: Owl Bot Co-authored-by: Lingqing Gan --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 4 ++-- packages/db-dtypes/.github/release-trigger.yml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 7672b49b6307..b2770d4e0379 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:5cddfe2fb5019bbf78335bc55f15bc13e18354a56b3ff46e1834f8e540807f05 -# created: 2024-10-31T01:41:07.349286254Z + digest: sha256:91d0075c6f2fd6a073a06168feee19fa2a8507692f2519a1dc7de3366d157e99 +# created: 2024-11-11T16:13:09.302418532Z diff --git a/packages/db-dtypes/.github/release-trigger.yml b/packages/db-dtypes/.github/release-trigger.yml index 4bb79e58eadf..134780d1ab6f 100644 --- a/packages/db-dtypes/.github/release-trigger.yml +++ b/packages/db-dtypes/.github/release-trigger.yml @@ -1,2 +1,2 @@ enabled: true -multiScmName: +multiScmName: python-db-dtypes-pandas From 1d63e1eef7842d58ae2bcf62441b9bb4f4c41675 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 12 Nov 2024 21:06:59 +0800 Subject: [PATCH 172/210] chore(python): update dependencies in .kokoro/docker/docs (#301) Source-Link: https://github.com/googleapis/synthtool/commit/59171c8f83f3522ce186e4d110d27e772da4ba7a Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:2ed982f884312e4883e01b5ab8af8b6935f0216a5a2d82928d273081fc3be562 Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 4 ++-- .../.kokoro/docker/docs/requirements.txt | 20 +++++++++---------- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index b2770d4e0379..6301519a9a05 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:91d0075c6f2fd6a073a06168feee19fa2a8507692f2519a1dc7de3366d157e99 -# created: 2024-11-11T16:13:09.302418532Z + digest: sha256:2ed982f884312e4883e01b5ab8af8b6935f0216a5a2d82928d273081fc3be562 +# created: 2024-11-12T12:09:45.821174897Z diff --git a/packages/db-dtypes/.kokoro/docker/docs/requirements.txt b/packages/db-dtypes/.kokoro/docker/docs/requirements.txt index 66eacc82f041..8bb0764594b1 100644 --- a/packages/db-dtypes/.kokoro/docker/docs/requirements.txt +++ b/packages/db-dtypes/.kokoro/docker/docs/requirements.txt @@ -1,5 +1,5 @@ # -# This file is autogenerated by pip-compile with Python 3.9 +# This file is autogenerated by pip-compile with Python 3.10 # by the following command: # # pip-compile --allow-unsafe --generate-hashes requirements.in @@ -8,9 +8,9 @@ argcomplete==3.5.1 \ --hash=sha256:1a1d148bdaa3e3b93454900163403df41448a248af01b6e849edc5ac08e6c363 \ --hash=sha256:eb1ee355aa2557bd3d0145de7b06b2a45b0ce461e1e7813f5d066039ab4177b4 # via nox -colorlog==6.8.2 \ - --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ - --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33 +colorlog==6.9.0 \ + --hash=sha256:5906e71acd67cb07a71e779c47c4bcb45fb8c2993eebe9e5adcd6a6f1b283eff \ + --hash=sha256:bfba54a1b93b94f54e1f4fe48395725a3d92fd2a4af702f6bd70946bdc0c6ac2 # via nox distlib==0.3.9 \ --hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \ @@ -24,9 +24,9 @@ nox==2024.10.9 \ --hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \ --hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95 # via -r requirements.in -packaging==24.1 \ - --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ - --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 +packaging==24.2 \ + 
--hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \ + --hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f # via nox platformdirs==4.3.6 \ --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ @@ -36,7 +36,7 @@ tomli==2.0.2 \ --hash=sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38 \ --hash=sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed # via nox -virtualenv==20.26.6 \ - --hash=sha256:280aede09a2a5c317e409a00102e7077c6432c5a38f0ef938e643805a7ad2c48 \ - --hash=sha256:7345cc5b25405607a624d8418154577459c3e0277f5466dd79c49d5e492995f2 +virtualenv==20.27.1 \ + --hash=sha256:142c6be10212543b32c6c45d3d3893dff89112cc588b7d0879ae5a1ec03a47ba \ + --hash=sha256:f11f1b8a29525562925f745563bfd48b189450f61fb34c4f9cc79dd5aa32a1f4 # via nox From aa6d554f787663c7db51004a8f370f56dc7ed482 Mon Sep 17 00:00:00 2001 From: Chelsea Lin Date: Tue, 12 Nov 2024 08:43:35 -0800 Subject: [PATCH 173/210] fix: support correct numpy construction for dbjson dtype in pandas 1.5 (#297) * fix: support correct numpy construction for dbjson dtype in pandas 1.5 * add unit tests for pandas 1.5 * nit * fixing import error in python 3.7 * update unit tests * nit --- packages/db-dtypes/db_dtypes/json.py | 13 ++++++++ .../db-dtypes/testing/constraints-3.9.txt | 2 +- packages/db-dtypes/tests/unit/test_json.py | 33 +++++++++++++++++++ 3 files changed, 47 insertions(+), 1 deletion(-) diff --git a/packages/db-dtypes/db_dtypes/json.py b/packages/db-dtypes/db_dtypes/json.py index d0bc6ca92a33..c43ebc2b1f11 100644 --- a/packages/db-dtypes/db_dtypes/json.py +++ b/packages/db-dtypes/db_dtypes/json.py @@ -231,3 +231,16 @@ def _reduce( if name in ["min", "max"]: raise TypeError("JSONArray does not support min/max reducntion.") super()._reduce(name, skipna=skipna, keepdims=keepdims, **kwargs) + + def __array__(self, dtype=None, copy: bool | None = None) -> np.ndarray: + """Correctly construct 
numpy arrays when passed to `np.asarray()`.""" + pa_type = self.pa_data.type + data = self + if dtype is None: + empty = pa.array([], type=pa_type).to_numpy(zero_copy_only=False) + dtype = empty.dtype + result = np.empty(len(data), dtype=dtype) + mask = data.isna() + result[mask] = self._dtype.na_value + result[~mask] = data[~mask].pa_data.to_numpy() + return result diff --git a/packages/db-dtypes/testing/constraints-3.9.txt b/packages/db-dtypes/testing/constraints-3.9.txt index 470082585f6d..afea9b0db5c1 100644 --- a/packages/db-dtypes/testing/constraints-3.9.txt +++ b/packages/db-dtypes/testing/constraints-3.9.txt @@ -1,3 +1,3 @@ -# Make sure we test with pandas 1.5.0. The Python version isn't that relevant. +# Make sure we test with pandas 1.5.3. The Python version isn't that relevant. pandas==1.5.3 numpy==1.24.0 \ No newline at end of file diff --git a/packages/db-dtypes/tests/unit/test_json.py b/packages/db-dtypes/tests/unit/test_json.py index 365bd8f68ddd..112b50c99b2a 100644 --- a/packages/db-dtypes/tests/unit/test_json.py +++ b/packages/db-dtypes/tests/unit/test_json.py @@ -12,7 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. +import json +import numpy as np import pandas as pd import pytest @@ -81,3 +83,34 @@ def test_deterministic_json_serialization(): y = {"b": 1, "a": 0} data = db_dtypes.JSONArray._from_sequence([y]) assert data[0] == x + + +def test_to_numpy(): + """ + Verifies that JSONArray can be cast to a NumPy array. + This test ensures compatibility with Python 3.9 and replicates the behavior + of the `test_to_numpy` test from `test_json_compliance.py::TestJSONArrayCasting`, + which is run with Python 3.12 environments only. 
+ """ + data = db_dtypes.JSONArray._from_sequence(JSON_DATA.values()) + expected = np.asarray(data) + + result = data.to_numpy() + pd._testing.assert_equal(result, expected) + + result = pd.Series(data).to_numpy() + pd._testing.assert_equal(result, expected) + + +def test_as_numpy_array(): + data = db_dtypes.JSONArray._from_sequence(JSON_DATA.values()) + result = np.asarray(data) + expected = np.asarray( + [ + json.dumps(value, sort_keys=True, separators=(",", ":")) + if value is not None + else pd.NA + for value in JSON_DATA.values() + ] + ) + pd._testing.assert_equal(result, expected) From 957a8c837216a6883f30abdcebdb7f5dd5765a3b Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 12 Nov 2024 10:55:11 -0600 Subject: [PATCH 174/210] chore(main): release 1.3.1 (#296) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/db-dtypes/CHANGELOG.md | 9 +++++++++ packages/db-dtypes/db_dtypes/version.py | 2 +- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/packages/db-dtypes/CHANGELOG.md b/packages/db-dtypes/CHANGELOG.md index 0d26b4aa9836..fd4ce0297e79 100644 --- a/packages/db-dtypes/CHANGELOG.md +++ b/packages/db-dtypes/CHANGELOG.md @@ -1,5 +1,14 @@ # Changelog +## [1.3.1](https://github.com/googleapis/python-db-dtypes-pandas/compare/v1.3.0...v1.3.1) (2024-11-12) + + +### Bug Fixes + +* Dbjson serialization with most compact JSON representation ([#299](https://github.com/googleapis/python-db-dtypes-pandas/issues/299)) ([c5e9a10](https://github.com/googleapis/python-db-dtypes-pandas/commit/c5e9a101022844e735099d5f2c645ce0cc46f7f8)) +* Support correct numpy construction for dbjson dtype in pandas 1.5 ([#297](https://github.com/googleapis/python-db-dtypes-pandas/issues/297)) ([f413f35](https://github.com/googleapis/python-db-dtypes-pandas/commit/f413f3527941fe52af7e19e2954a936bb3de8394)) +* Support dbjson type on pandas version 1.5 
([#295](https://github.com/googleapis/python-db-dtypes-pandas/issues/295)) ([4b84e4a](https://github.com/googleapis/python-db-dtypes-pandas/commit/4b84e4a6fada5ecfa7f910dca61e6de714abdb9d)) + ## [1.3.0](https://github.com/googleapis/python-db-dtypes-pandas/compare/v1.2.0...v1.3.0) (2024-08-08) diff --git a/packages/db-dtypes/db_dtypes/version.py b/packages/db-dtypes/db_dtypes/version.py index 8b2c0bc20730..081cecee1938 100644 --- a/packages/db-dtypes/db_dtypes/version.py +++ b/packages/db-dtypes/db_dtypes/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "1.3.0" +__version__ = "1.3.1" From a361d3e0eb96c8da8462f57f8af876eaeb49d8e5 Mon Sep 17 00:00:00 2001 From: Lingqing Gan Date: Tue, 19 Nov 2024 07:26:47 +0800 Subject: [PATCH 175/210] test: use python 3.10 for docs (#305) --- packages/db-dtypes/noxfile.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/db-dtypes/noxfile.py b/packages/db-dtypes/noxfile.py index 9587e6e7dd7b..c487cd79a6ec 100644 --- a/packages/db-dtypes/noxfile.py +++ b/packages/db-dtypes/noxfile.py @@ -376,7 +376,7 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python="3.9") +@nox.session(python="3.10") def docs(session): """Build the docs for this library.""" From 5110c8721776cfbf015596a8ec05ca6a3124bd30 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 18 Dec 2024 02:39:28 +0800 Subject: [PATCH 176/210] chore(python): update dependencies in .kokoro/docker/docs (#308) Source-Link: https://github.com/googleapis/synthtool/commit/e808c98e1ab7eec3df2a95a05331619f7001daef Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:8e3e7e18255c22d1489258d0374c901c01f9c4fd77a12088670cd73d580aa737 Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 4 +- .../.kokoro/docker/docs/requirements.txt | 52 
+++++++++++++++---- 2 files changed, 43 insertions(+), 13 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 6301519a9a05..26306af66f81 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:2ed982f884312e4883e01b5ab8af8b6935f0216a5a2d82928d273081fc3be562 -# created: 2024-11-12T12:09:45.821174897Z + digest: sha256:8e3e7e18255c22d1489258d0374c901c01f9c4fd77a12088670cd73d580aa737 +# created: 2024-12-17T00:59:58.625514486Z diff --git a/packages/db-dtypes/.kokoro/docker/docs/requirements.txt b/packages/db-dtypes/.kokoro/docker/docs/requirements.txt index 8bb0764594b1..f99a5c4aac7f 100644 --- a/packages/db-dtypes/.kokoro/docker/docs/requirements.txt +++ b/packages/db-dtypes/.kokoro/docker/docs/requirements.txt @@ -2,11 +2,11 @@ # This file is autogenerated by pip-compile with Python 3.10 # by the following command: # -# pip-compile --allow-unsafe --generate-hashes requirements.in +# pip-compile --allow-unsafe --generate-hashes synthtool/gcp/templates/python_library/.kokoro/docker/docs/requirements.in # -argcomplete==3.5.1 \ - --hash=sha256:1a1d148bdaa3e3b93454900163403df41448a248af01b6e849edc5ac08e6c363 \ - --hash=sha256:eb1ee355aa2557bd3d0145de7b06b2a45b0ce461e1e7813f5d066039ab4177b4 +argcomplete==3.5.2 \ + --hash=sha256:036d020d79048a5d525bc63880d7a4b8d1668566b8a76daf1144c0bbe0f63472 \ + --hash=sha256:23146ed7ac4403b70bd6026402468942ceba34a6732255b9edf5b7354f68a6bb # via nox colorlog==6.9.0 \ --hash=sha256:5906e71acd67cb07a71e779c47c4bcb45fb8c2993eebe9e5adcd6a6f1b283eff \ @@ -23,7 +23,7 @@ filelock==3.16.1 \ nox==2024.10.9 \ --hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \ --hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95 - # via -r 
requirements.in + # via -r synthtool/gcp/templates/python_library/.kokoro/docker/docs/requirements.in packaging==24.2 \ --hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \ --hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f @@ -32,11 +32,41 @@ platformdirs==4.3.6 \ --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb # via virtualenv -tomli==2.0.2 \ - --hash=sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38 \ - --hash=sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed +tomli==2.2.1 \ + --hash=sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6 \ + --hash=sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd \ + --hash=sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c \ + --hash=sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b \ + --hash=sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8 \ + --hash=sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6 \ + --hash=sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77 \ + --hash=sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff \ + --hash=sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea \ + --hash=sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192 \ + --hash=sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249 \ + --hash=sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee \ + --hash=sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4 \ + --hash=sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98 \ + --hash=sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8 \ + 
--hash=sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4 \ + --hash=sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281 \ + --hash=sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744 \ + --hash=sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69 \ + --hash=sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13 \ + --hash=sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140 \ + --hash=sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e \ + --hash=sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e \ + --hash=sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc \ + --hash=sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff \ + --hash=sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec \ + --hash=sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2 \ + --hash=sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222 \ + --hash=sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106 \ + --hash=sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272 \ + --hash=sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a \ + --hash=sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7 # via nox -virtualenv==20.27.1 \ - --hash=sha256:142c6be10212543b32c6c45d3d3893dff89112cc588b7d0879ae5a1ec03a47ba \ - --hash=sha256:f11f1b8a29525562925f745563bfd48b189450f61fb34c4f9cc79dd5aa32a1f4 +virtualenv==20.28.0 \ + --hash=sha256:23eae1b4516ecd610481eda647f3a7c09aea295055337331bb4e6892ecce47b0 \ + --hash=sha256:2c9c3262bb8e7b87ea801d715fae4495e6032450c71d2309be9550e7364049aa # via nox From 1bb656c250d4b28bea2d04aeba37418b40260f99 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 17 Jan 2025 14:12:24 -0800 Subject: 
[PATCH 177/210] chore(python): fix docs publish build (#313) * chore(python): fix docs publish build Source-Link: https://github.com/googleapis/synthtool/commit/bd9ede2fea1b640b7e90d5a1d110e6b300a2b43f Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:04c35dc5f49f0f503a306397d6d043685f8d2bb822ab515818c4208d7fb2db3a * remove unit3.7 * remove uncovered tests after dropping python 3.7 --------- Co-authored-by: Owl Bot Co-authored-by: Chelsea Lin --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 6 +- packages/db-dtypes/.github/workflows/docs.yml | 2 +- .../db-dtypes/.github/workflows/unittest.yml | 2 +- .../.kokoro/docker/docs/requirements.in | 1 + .../.kokoro/docker/docs/requirements.txt | 243 +++++++++++++++++- packages/db-dtypes/.kokoro/publish-docs.sh | 4 - .../db-dtypes/db_dtypes/pandas_backports.py | 26 +- packages/db-dtypes/renovate.json | 2 +- packages/db-dtypes/tests/unit/test_date.py | 3 - packages/db-dtypes/tests/unit/test_dtypes.py | 3 +- 10 files changed, 243 insertions(+), 49 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 26306af66f81..4c0027ff1c61 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -1,4 +1,4 @@ -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:8e3e7e18255c22d1489258d0374c901c01f9c4fd77a12088670cd73d580aa737 -# created: 2024-12-17T00:59:58.625514486Z + digest: sha256:04c35dc5f49f0f503a306397d6d043685f8d2bb822ab515818c4208d7fb2db3a +# created: 2025-01-16T15:24:11.364245182Z diff --git a/packages/db-dtypes/.github/workflows/docs.yml b/packages/db-dtypes/.github/workflows/docs.yml index 698fbc5c94da..2833fe98fff0 100644 --- a/packages/db-dtypes/.github/workflows/docs.yml +++ b/packages/db-dtypes/.github/workflows/docs.yml @@ -12,7 +12,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v5 with: - python-version: "3.9" + python-version: "3.10" - name: Install nox run: | python -m pip install --upgrade setuptools pip wheel diff --git a/packages/db-dtypes/.github/workflows/unittest.yml b/packages/db-dtypes/.github/workflows/unittest.yml index 000e945eb5ed..2386b3aa010b 100644 --- a/packages/db-dtypes/.github/workflows/unittest.yml +++ b/packages/db-dtypes/.github/workflows/unittest.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12'] + python: ['3.8', '3.9', '3.10', '3.11', '3.12'] steps: - name: Checkout uses: actions/checkout@v4 diff --git a/packages/db-dtypes/.kokoro/docker/docs/requirements.in b/packages/db-dtypes/.kokoro/docker/docs/requirements.in index 816817c672a1..586bd07037ae 100644 --- a/packages/db-dtypes/.kokoro/docker/docs/requirements.in +++ b/packages/db-dtypes/.kokoro/docker/docs/requirements.in @@ -1 +1,2 @@ nox +gcp-docuploader diff --git a/packages/db-dtypes/.kokoro/docker/docs/requirements.txt b/packages/db-dtypes/.kokoro/docker/docs/requirements.txt index f99a5c4aac7f..a9360a25b707 100644 --- a/packages/db-dtypes/.kokoro/docker/docs/requirements.txt +++ b/packages/db-dtypes/.kokoro/docker/docs/requirements.txt @@ -2,16 +2,124 @@ # This file is autogenerated by pip-compile with Python 3.10 # by the following command: # -# 
pip-compile --allow-unsafe --generate-hashes synthtool/gcp/templates/python_library/.kokoro/docker/docs/requirements.in +# pip-compile --allow-unsafe --generate-hashes requirements.in # -argcomplete==3.5.2 \ - --hash=sha256:036d020d79048a5d525bc63880d7a4b8d1668566b8a76daf1144c0bbe0f63472 \ - --hash=sha256:23146ed7ac4403b70bd6026402468942ceba34a6732255b9edf5b7354f68a6bb +argcomplete==3.5.3 \ + --hash=sha256:2ab2c4a215c59fd6caaff41a869480a23e8f6a5f910b266c1808037f4e375b61 \ + --hash=sha256:c12bf50eded8aebb298c7b7da7a5ff3ee24dffd9f5281867dfe1424b58c55392 # via nox +cachetools==5.5.0 \ + --hash=sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 \ + --hash=sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a + # via google-auth +certifi==2024.12.14 \ + --hash=sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56 \ + --hash=sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db + # via requests +charset-normalizer==3.4.1 \ + --hash=sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537 \ + --hash=sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa \ + --hash=sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a \ + --hash=sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294 \ + --hash=sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b \ + --hash=sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd \ + --hash=sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601 \ + --hash=sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd \ + --hash=sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4 \ + --hash=sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d \ + --hash=sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2 \ + --hash=sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313 
\ + --hash=sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd \ + --hash=sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa \ + --hash=sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8 \ + --hash=sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1 \ + --hash=sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2 \ + --hash=sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496 \ + --hash=sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d \ + --hash=sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b \ + --hash=sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e \ + --hash=sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a \ + --hash=sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4 \ + --hash=sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca \ + --hash=sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78 \ + --hash=sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408 \ + --hash=sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5 \ + --hash=sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3 \ + --hash=sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f \ + --hash=sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a \ + --hash=sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765 \ + --hash=sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6 \ + --hash=sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146 \ + --hash=sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6 \ + --hash=sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9 \ + --hash=sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd \ + 
--hash=sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c \ + --hash=sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f \ + --hash=sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545 \ + --hash=sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176 \ + --hash=sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770 \ + --hash=sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824 \ + --hash=sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f \ + --hash=sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf \ + --hash=sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487 \ + --hash=sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d \ + --hash=sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd \ + --hash=sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b \ + --hash=sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534 \ + --hash=sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f \ + --hash=sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b \ + --hash=sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9 \ + --hash=sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd \ + --hash=sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125 \ + --hash=sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9 \ + --hash=sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de \ + --hash=sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11 \ + --hash=sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d \ + --hash=sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35 \ + --hash=sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f \ + 
--hash=sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda \ + --hash=sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7 \ + --hash=sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a \ + --hash=sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971 \ + --hash=sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8 \ + --hash=sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41 \ + --hash=sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d \ + --hash=sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f \ + --hash=sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757 \ + --hash=sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a \ + --hash=sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886 \ + --hash=sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77 \ + --hash=sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76 \ + --hash=sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247 \ + --hash=sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85 \ + --hash=sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb \ + --hash=sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7 \ + --hash=sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e \ + --hash=sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6 \ + --hash=sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037 \ + --hash=sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1 \ + --hash=sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e \ + --hash=sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807 \ + --hash=sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407 \ + 
--hash=sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c \ + --hash=sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12 \ + --hash=sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3 \ + --hash=sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089 \ + --hash=sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd \ + --hash=sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e \ + --hash=sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00 \ + --hash=sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616 + # via requests +click==8.1.8 \ + --hash=sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2 \ + --hash=sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a + # via gcp-docuploader colorlog==6.9.0 \ --hash=sha256:5906e71acd67cb07a71e779c47c4bcb45fb8c2993eebe9e5adcd6a6f1b283eff \ --hash=sha256:bfba54a1b93b94f54e1f4fe48395725a3d92fd2a4af702f6bd70946bdc0c6ac2 - # via nox + # via + # gcp-docuploader + # nox distlib==0.3.9 \ --hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \ --hash=sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403 @@ -20,10 +128,78 @@ filelock==3.16.1 \ --hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \ --hash=sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435 # via virtualenv +gcp-docuploader==0.6.5 \ + --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ + --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea + # via -r requirements.in +google-api-core==2.24.0 \ + --hash=sha256:10d82ac0fca69c82a25b3efdeefccf6f28e02ebb97925a8cce8edbfe379929d9 \ + --hash=sha256:e255640547a597a4da010876d333208ddac417d60add22b6851a0c66a831fcaf + # via + # google-cloud-core + # google-cloud-storage +google-auth==2.37.0 \ + 
--hash=sha256:0054623abf1f9c83492c63d3f47e77f0a544caa3d40b2d98e099a611c2dd5d00 \ + --hash=sha256:42664f18290a6be591be5329a96fe30184be1a1badb7292a7f686a9659de9ca0 + # via + # google-api-core + # google-cloud-core + # google-cloud-storage +google-cloud-core==2.4.1 \ + --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \ + --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61 + # via google-cloud-storage +google-cloud-storage==2.19.0 \ + --hash=sha256:aeb971b5c29cf8ab98445082cbfe7b161a1f48ed275822f59ed3f1524ea54fba \ + --hash=sha256:cd05e9e7191ba6cb68934d8eb76054d9be4562aa89dbc4236feee4d7d51342b2 + # via gcp-docuploader +google-crc32c==1.6.0 \ + --hash=sha256:05e2d8c9a2f853ff116db9706b4a27350587f341eda835f46db3c0a8c8ce2f24 \ + --hash=sha256:18e311c64008f1f1379158158bb3f0c8d72635b9eb4f9545f8cf990c5668e59d \ + --hash=sha256:236c87a46cdf06384f614e9092b82c05f81bd34b80248021f729396a78e55d7e \ + --hash=sha256:35834855408429cecf495cac67ccbab802de269e948e27478b1e47dfb6465e57 \ + --hash=sha256:386122eeaaa76951a8196310432c5b0ef3b53590ef4c317ec7588ec554fec5d2 \ + --hash=sha256:40b05ab32a5067525670880eb5d169529089a26fe35dce8891127aeddc1950e8 \ + --hash=sha256:48abd62ca76a2cbe034542ed1b6aee851b6f28aaca4e6551b5599b6f3ef175cc \ + --hash=sha256:50cf2a96da226dcbff8671233ecf37bf6e95de98b2a2ebadbfdf455e6d05df42 \ + --hash=sha256:51c4f54dd8c6dfeb58d1df5e4f7f97df8abf17a36626a217f169893d1d7f3e9f \ + --hash=sha256:5bcc90b34df28a4b38653c36bb5ada35671ad105c99cfe915fb5bed7ad6924aa \ + --hash=sha256:62f6d4a29fea082ac4a3c9be5e415218255cf11684ac6ef5488eea0c9132689b \ + --hash=sha256:6eceb6ad197656a1ff49ebfbbfa870678c75be4344feb35ac1edf694309413dc \ + --hash=sha256:7aec8e88a3583515f9e0957fe4f5f6d8d4997e36d0f61624e70469771584c760 \ + --hash=sha256:91ca8145b060679ec9176e6de4f89b07363d6805bd4760631ef254905503598d \ + --hash=sha256:a184243544811e4a50d345838a883733461e67578959ac59964e43cca2c791e7 \ + 
--hash=sha256:a9e4b426c3702f3cd23b933436487eb34e01e00327fac20c9aebb68ccf34117d \ + --hash=sha256:bb0966e1c50d0ef5bc743312cc730b533491d60585a9a08f897274e57c3f70e0 \ + --hash=sha256:bb8b3c75bd157010459b15222c3fd30577042a7060e29d42dabce449c087f2b3 \ + --hash=sha256:bd5e7d2445d1a958c266bfa5d04c39932dc54093fa391736dbfdb0f1929c1fb3 \ + --hash=sha256:c87d98c7c4a69066fd31701c4e10d178a648c2cac3452e62c6b24dc51f9fcc00 \ + --hash=sha256:d2952396dc604544ea7476b33fe87faedc24d666fb0c2d5ac971a2b9576ab871 \ + --hash=sha256:d8797406499f28b5ef791f339594b0b5fdedf54e203b5066675c406ba69d705c \ + --hash=sha256:d9e9913f7bd69e093b81da4535ce27af842e7bf371cde42d1ae9e9bd382dc0e9 \ + --hash=sha256:e2806553238cd076f0a55bddab37a532b53580e699ed8e5606d0de1f856b5205 \ + --hash=sha256:ebab974b1687509e5c973b5c4b8b146683e101e102e17a86bd196ecaa4d099fc \ + --hash=sha256:ed767bf4ba90104c1216b68111613f0d5926fb3780660ea1198fc469af410e9d \ + --hash=sha256:f7a1fc29803712f80879b0806cb83ab24ce62fc8daf0569f2204a0cfd7f68ed4 + # via + # google-cloud-storage + # google-resumable-media +google-resumable-media==2.7.2 \ + --hash=sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa \ + --hash=sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0 + # via google-cloud-storage +googleapis-common-protos==1.66.0 \ + --hash=sha256:c3e7b33d15fdca5374cc0a7346dd92ffa847425cc4ea941d970f13680052ec8c \ + --hash=sha256:d7abcd75fabb2e0ec9f74466401f6c119a0b498e27370e9be4c94cb7e382b8ed + # via google-api-core +idna==3.10 \ + --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ + --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 + # via requests nox==2024.10.9 \ --hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \ --hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95 - # via -r synthtool/gcp/templates/python_library/.kokoro/docker/docs/requirements.in + # via -r requirements.in 
packaging==24.2 \ --hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \ --hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f @@ -32,6 +208,51 @@ platformdirs==4.3.6 \ --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb # via virtualenv +proto-plus==1.25.0 \ + --hash=sha256:c91fc4a65074ade8e458e95ef8bac34d4008daa7cce4a12d6707066fca648961 \ + --hash=sha256:fbb17f57f7bd05a68b7707e745e26528b0b3c34e378db91eef93912c54982d91 + # via google-api-core +protobuf==5.29.3 \ + --hash=sha256:0a18ed4a24198528f2333802eb075e59dea9d679ab7a6c5efb017a59004d849f \ + --hash=sha256:0eb32bfa5219fc8d4111803e9a690658aa2e6366384fd0851064b963b6d1f2a7 \ + --hash=sha256:3ea51771449e1035f26069c4c7fd51fba990d07bc55ba80701c78f886bf9c888 \ + --hash=sha256:5da0f41edaf117bde316404bad1a486cb4ededf8e4a54891296f648e8e076620 \ + --hash=sha256:6ce8cc3389a20693bfde6c6562e03474c40851b44975c9b2bf6df7d8c4f864da \ + --hash=sha256:84a57163a0ccef3f96e4b6a20516cedcf5bb3a95a657131c5c3ac62200d23252 \ + --hash=sha256:a4fa6f80816a9a0678429e84973f2f98cbc218cca434abe8db2ad0bffc98503a \ + --hash=sha256:a8434404bbf139aa9e1300dbf989667a83d42ddda9153d8ab76e0d5dcaca484e \ + --hash=sha256:b89c115d877892a512f79a8114564fb435943b59067615894c3b13cd3e1fa107 \ + --hash=sha256:c027e08a08be10b67c06bf2370b99c811c466398c357e615ca88c91c07f0910f \ + --hash=sha256:daaf63f70f25e8689c072cfad4334ca0ac1d1e05a92fc15c54eb9cf23c3efd84 + # via + # gcp-docuploader + # google-api-core + # googleapis-common-protos + # proto-plus +pyasn1==0.6.1 \ + --hash=sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629 \ + --hash=sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034 + # via + # pyasn1-modules + # rsa +pyasn1-modules==0.4.1 \ + --hash=sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd \ + 
--hash=sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c + # via google-auth +requests==2.32.3 \ + --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ + --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 + # via + # google-api-core + # google-cloud-storage +rsa==4.9 \ + --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ + --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 + # via google-auth +six==1.17.0 \ + --hash=sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274 \ + --hash=sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81 + # via gcp-docuploader tomli==2.2.1 \ --hash=sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6 \ --hash=sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd \ @@ -66,7 +287,11 @@ tomli==2.2.1 \ --hash=sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a \ --hash=sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7 # via nox -virtualenv==20.28.0 \ - --hash=sha256:23eae1b4516ecd610481eda647f3a7c09aea295055337331bb4e6892ecce47b0 \ - --hash=sha256:2c9c3262bb8e7b87ea801d715fae4495e6032450c71d2309be9550e7364049aa +urllib3==2.3.0 \ + --hash=sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df \ + --hash=sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d + # via requests +virtualenv==20.28.1 \ + --hash=sha256:412773c85d4dab0409b83ec36f7a6499e72eaf08c80e81e9576bca61831c71cb \ + --hash=sha256:5d34ab240fdb5d21549b76f9e8ff3af28252f5499fb6d6f031adac4e5a8c5329 # via nox diff --git a/packages/db-dtypes/.kokoro/publish-docs.sh b/packages/db-dtypes/.kokoro/publish-docs.sh index 233205d580e9..4ed4aaf1346f 100755 --- a/packages/db-dtypes/.kokoro/publish-docs.sh +++ b/packages/db-dtypes/.kokoro/publish-docs.sh @@ -20,10 +20,6 @@ export PYTHONUNBUFFERED=1 export 
PATH="${HOME}/.local/bin:${PATH}" -# Install nox -python3.10 -m pip install --require-hashes -r .kokoro/requirements.txt -python3.10 -m nox --version - # build docs nox -s docs diff --git a/packages/db-dtypes/db_dtypes/pandas_backports.py b/packages/db-dtypes/db_dtypes/pandas_backports.py index e3aea1783fd6..8112c549d2ce 100644 --- a/packages/db-dtypes/db_dtypes/pandas_backports.py +++ b/packages/db-dtypes/db_dtypes/pandas_backports.py @@ -19,7 +19,6 @@ the versions in the later versions of pandas. """ -import operator from typing import Any import numpy @@ -63,10 +62,7 @@ def import_default(module_name, force=False, default=None): return default name = default.__name__ - try: - module = __import__(module_name, {}, {}, [name]) - except ModuleNotFoundError: - return default + module = __import__(module_name, {}, {}, [name]) return getattr(module, name, default) @@ -80,26 +76,6 @@ class OpsMixin: def _cmp_method(self, other, op): # pragma: NO COVER return NotImplemented - def __eq__(self, other): - return self._cmp_method(other, operator.eq) - - def __ne__(self, other): - return self._cmp_method(other, operator.ne) - - def __lt__(self, other): - return self._cmp_method(other, operator.lt) - - def __le__(self, other): - return self._cmp_method(other, operator.le) - - def __gt__(self, other): - return self._cmp_method(other, operator.gt) - - def __ge__(self, other): - return self._cmp_method(other, operator.ge) - - __add__ = __radd__ = __sub__ = lambda self, other: NotImplemented - # TODO: use public API once pandas 1.5 / 2.x is released. 
# See: https://github.com/pandas-dev/pandas/pull/45544 diff --git a/packages/db-dtypes/renovate.json b/packages/db-dtypes/renovate.json index 39b2a0ec9296..c7875c469bd5 100644 --- a/packages/db-dtypes/renovate.json +++ b/packages/db-dtypes/renovate.json @@ -5,7 +5,7 @@ ":preserveSemverRanges", ":disableDependencyDashboard" ], - "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"], + "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py", ".github/workflows/unittest.yml"], "pip_requirements": { "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] } diff --git a/packages/db-dtypes/tests/unit/test_date.py b/packages/db-dtypes/tests/unit/test_date.py index 9dbc6d9ccddc..a3ce43eed450 100644 --- a/packages/db-dtypes/tests/unit/test_date.py +++ b/packages/db-dtypes/tests/unit/test_date.py @@ -48,9 +48,6 @@ float("nan"), ] -if hasattr(pandas, "NA"): - NULL_VALUE_TEST_CASES.append(pandas.NA) - def test_box_func(): input_array = db_dtypes.DateArray([]) diff --git a/packages/db-dtypes/tests/unit/test_dtypes.py b/packages/db-dtypes/tests/unit/test_dtypes.py index f2c5593c8fec..87b6a920b7c1 100644 --- a/packages/db-dtypes/tests/unit/test_dtypes.py +++ b/packages/db-dtypes/tests/unit/test_dtypes.py @@ -584,8 +584,7 @@ def test_date_add(): do = pd.DateOffset(days=1) expect = dates.astype("object") + do np.testing.assert_array_equal(dates + do, expect) - if pandas_release >= (1, 1): - np.testing.assert_array_equal(do + dates, expect) + np.testing.assert_array_equal(do + dates, expect) with pytest.raises(TypeError): dates + times.astype("timedelta64") From c0d526f354963ac68a31a08f25073eb31ce1c3d0 Mon Sep 17 00:00:00 2001 From: Chelsea Lin Date: Fri, 17 Jan 2025 15:29:32 -0800 Subject: [PATCH 178/210] feat: Add Arrow types for efficient JSON data representation in pyarrow (#312) * feat: add ArrowJSONtype to extend pyarrow for JSONDtype * nit * add JSONArrowScalar * fix 
cover --- packages/db-dtypes/db_dtypes/__init__.py | 6 +- packages/db-dtypes/db_dtypes/json.py | 40 ++++++ .../compliance/json/test_json_compliance.py | 4 - packages/db-dtypes/tests/unit/test_json.py | 123 +++++++++++++++++- 4 files changed, 166 insertions(+), 7 deletions(-) diff --git a/packages/db-dtypes/db_dtypes/__init__.py b/packages/db-dtypes/db_dtypes/__init__.py index 952643b45058..d5b05dc8eaff 100644 --- a/packages/db-dtypes/db_dtypes/__init__.py +++ b/packages/db-dtypes/db_dtypes/__init__.py @@ -30,8 +30,8 @@ from db_dtypes import core from db_dtypes.version import __version__ -from . import _versions_helpers +from . import _versions_helpers date_dtype_name = "dbdate" time_dtype_name = "dbtime" @@ -50,7 +50,7 @@ # To use JSONArray and JSONDtype, you'll need Pandas 1.5.0 or later. With the removal # of Python 3.7 compatibility, the minimum Pandas version will be updated to 1.5.0. if packaging.version.Version(pandas.__version__) >= packaging.version.Version("1.5.0"): - from db_dtypes.json import JSONArray, JSONDtype + from db_dtypes.json import JSONArray, JSONArrowScalar, JSONArrowType, JSONDtype else: JSONArray = None JSONDtype = None @@ -374,6 +374,8 @@ def __sub__(self, other): "DateDtype", "JSONDtype", "JSONArray", + "JSONArrowType", + "JSONArrowScalar", "TimeArray", "TimeDtype", ] diff --git a/packages/db-dtypes/db_dtypes/json.py b/packages/db-dtypes/db_dtypes/json.py index c43ebc2b1f11..145eec3a9ed6 100644 --- a/packages/db-dtypes/db_dtypes/json.py +++ b/packages/db-dtypes/db_dtypes/json.py @@ -64,6 +64,10 @@ def construct_array_type(cls): """Return the array type associated with this dtype.""" return JSONArray + def __from_arrow__(self, array: pa.Array | pa.ChunkedArray) -> JSONArray: + """Convert the pyarrow array to the extension array.""" + return JSONArray(array) + class JSONArray(arrays.ArrowExtensionArray): """Extension array that handles BigQuery JSON data, leveraging a string-based @@ -92,6 +96,10 @@ def __init__(self, values) -> None: else: 
raise NotImplementedError(f"Unsupported pandas version: {pd.__version__}") + def __arrow_array__(self, type=None): + """Convert to an arrow array. This is required for pyarrow extension.""" + return pa.array(self.pa_data, type=JSONArrowType()) + @classmethod def _box_pa( cls, value, pa_type: pa.DataType | None = None @@ -208,6 +216,8 @@ def __getitem__(self, item): value = self.pa_data[item] if isinstance(value, pa.ChunkedArray): return type(self)(value) + elif isinstance(value, pa.ExtensionScalar): + return value.as_py() else: scalar = JSONArray._deserialize_json(value.as_py()) if scalar is None: @@ -244,3 +254,33 @@ def __array__(self, dtype=None, copy: bool | None = None) -> np.ndarray: result[mask] = self._dtype.na_value result[~mask] = data[~mask].pa_data.to_numpy() return result + + +class JSONArrowScalar(pa.ExtensionScalar): + def as_py(self): + return JSONArray._deserialize_json(self.value.as_py() if self.value else None) + + +class JSONArrowType(pa.ExtensionType): + """Arrow extension type for the `dbjson` Pandas extension type.""" + + def __init__(self) -> None: + super().__init__(pa.string(), "dbjson") + + def __arrow_ext_serialize__(self) -> bytes: + return b"" + + @classmethod + def __arrow_ext_deserialize__(cls, storage_type, serialized) -> JSONArrowType: + return JSONArrowType() + + def to_pandas_dtype(self): + return JSONDtype() + + def __arrow_ext_scalar_class__(self): + return JSONArrowScalar + + +# Register the type to be included in RecordBatches, sent over IPC and received in +# another Python process. 
+pa.register_extension_type(JSONArrowType()) diff --git a/packages/db-dtypes/tests/compliance/json/test_json_compliance.py b/packages/db-dtypes/tests/compliance/json/test_json_compliance.py index 2a8e69ae338f..9a0d0efbc877 100644 --- a/packages/db-dtypes/tests/compliance/json/test_json_compliance.py +++ b/packages/db-dtypes/tests/compliance/json/test_json_compliance.py @@ -22,10 +22,6 @@ import pytest -class TestJSONArrayAccumulate(base.BaseAccumulateTests): - pass - - class TestJSONArrayCasting(base.BaseCastingTests): def test_astype_str(self, data): # Use `json.dumps(str)` instead of passing `str(obj)` directly to the super method. diff --git a/packages/db-dtypes/tests/unit/test_json.py b/packages/db-dtypes/tests/unit/test_json.py index 112b50c99b2a..055eef0d532d 100644 --- a/packages/db-dtypes/tests/unit/test_json.py +++ b/packages/db-dtypes/tests/unit/test_json.py @@ -13,9 +13,11 @@ # limitations under the License. import json +import math import numpy as np import pandas as pd +import pyarrow as pa import pytest import db_dtypes @@ -36,7 +38,7 @@ "null_field": None, "order": { "items": ["book", "pen", "computer"], - "total": 15.99, + "total": 15, "address": {"street": "123 Main St", "city": "Anytown"}, }, }, @@ -114,3 +116,122 @@ def test_as_numpy_array(): ] ) pd._testing.assert_equal(result, expected) + + +def test_json_arrow_array(): + data = db_dtypes.JSONArray._from_sequence(JSON_DATA.values()) + assert isinstance(data.__arrow_array__(), pa.ExtensionArray) + + +def test_json_arrow_storage_type(): + arrow_json_type = db_dtypes.JSONArrowType() + assert arrow_json_type.extension_name == "dbjson" + assert pa.types.is_string(arrow_json_type.storage_type) + + +def test_json_arrow_constructors(): + data = [ + json.dumps(value, sort_keys=True, separators=(",", ":")) + for value in JSON_DATA.values() + ] + storage_array = pa.array(data, type=pa.string()) + + arr_1 = db_dtypes.JSONArrowType().wrap_array(storage_array) + assert isinstance(arr_1, pa.ExtensionArray) + 
+ arr_2 = pa.ExtensionArray.from_storage(db_dtypes.JSONArrowType(), storage_array) + assert isinstance(arr_2, pa.ExtensionArray) + + assert arr_1 == arr_2 + + +def test_json_arrow_to_pandas(): + data = [ + json.dumps(value, sort_keys=True, separators=(",", ":")) + for value in JSON_DATA.values() + ] + arr = pa.array(data, type=db_dtypes.JSONArrowType()) + + s = arr.to_pandas() + assert isinstance(s.dtypes, db_dtypes.JSONDtype) + assert s[0] + assert s[1] == 100 + assert math.isclose(s[2], 0.98) + assert s[3] == "hello world" + assert math.isclose(s[4][0], 0.1) + assert math.isclose(s[4][1], 0.2) + assert s[5] == { + "null_field": None, + "order": { + "items": ["book", "pen", "computer"], + "total": 15, + "address": {"street": "123 Main St", "city": "Anytown"}, + }, + } + assert pd.isna(s[6]) + + +def test_json_arrow_to_pylist(): + data = [ + json.dumps(value, sort_keys=True, separators=(",", ":")) + for value in JSON_DATA.values() + ] + arr = pa.array(data, type=db_dtypes.JSONArrowType()) + + s = arr.to_pylist() + assert isinstance(s, list) + assert s[0] + assert s[1] == 100 + assert math.isclose(s[2], 0.98) + assert s[3] == "hello world" + assert math.isclose(s[4][0], 0.1) + assert math.isclose(s[4][1], 0.2) + assert s[5] == { + "null_field": None, + "order": { + "items": ["book", "pen", "computer"], + "total": 15, + "address": {"street": "123 Main St", "city": "Anytown"}, + }, + } + assert s[6] is None + + +def test_json_arrow_record_batch(): + data = [ + json.dumps(value, sort_keys=True, separators=(",", ":")) + for value in JSON_DATA.values() + ] + arr = pa.array(data, type=db_dtypes.JSONArrowType()) + batch = pa.RecordBatch.from_arrays([arr], ["json_col"]) + sink = pa.BufferOutputStream() + + with pa.RecordBatchStreamWriter(sink, batch.schema) as writer: + writer.write_batch(batch) + + buf = sink.getvalue() + + with pa.ipc.open_stream(buf) as reader: + result = reader.read_all() + + json_col = result.column("json_col") + assert isinstance(json_col.type, 
db_dtypes.JSONArrowType) + + s = json_col.to_pylist() + + assert isinstance(s, list) + assert s[0] + assert s[1] == 100 + assert math.isclose(s[2], 0.98) + assert s[3] == "hello world" + assert math.isclose(s[4][0], 0.1) + assert math.isclose(s[4][1], 0.2) + assert s[5] == { + "null_field": None, + "order": { + "items": ["book", "pen", "computer"], + "total": 15, + "address": {"street": "123 Main St", "city": "Anytown"}, + }, + } + assert s[6] is None From 16a4abde19e23c19b6e06727430c7a2520b0110b Mon Sep 17 00:00:00 2001 From: Chelsea Lin Date: Tue, 21 Jan 2025 12:53:56 -0800 Subject: [PATCH 179/210] feat: add __hash__ property for JSONArrowType (#316) --- packages/db-dtypes/db_dtypes/json.py | 3 +++ packages/db-dtypes/tests/unit/test_json.py | 5 +++++ 2 files changed, 8 insertions(+) diff --git a/packages/db-dtypes/db_dtypes/json.py b/packages/db-dtypes/db_dtypes/json.py index 145eec3a9ed6..99e0c67d86cb 100644 --- a/packages/db-dtypes/db_dtypes/json.py +++ b/packages/db-dtypes/db_dtypes/json.py @@ -274,6 +274,9 @@ def __arrow_ext_serialize__(self) -> bytes: def __arrow_ext_deserialize__(cls, storage_type, serialized) -> JSONArrowType: return JSONArrowType() + def __hash__(self) -> int: + return hash(str(self)) + def to_pandas_dtype(self): return JSONDtype() diff --git a/packages/db-dtypes/tests/unit/test_json.py b/packages/db-dtypes/tests/unit/test_json.py index 055eef0d532d..ff2c86748799 100644 --- a/packages/db-dtypes/tests/unit/test_json.py +++ b/packages/db-dtypes/tests/unit/test_json.py @@ -129,6 +129,11 @@ def test_json_arrow_storage_type(): assert pa.types.is_string(arrow_json_type.storage_type) +def test_json_arrow_hash(): + arr = pa.array([], type=db_dtypes.JSONArrowType()) + assert hash(arr.type) == hash(db_dtypes.JSONArrowType()) + + def test_json_arrow_constructors(): data = [ json.dumps(value, sort_keys=True, separators=(",", ":")) From c7f3354dbcc0ae4312cdb8186aac1a6b202b9f61 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" 
<55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 22 Jan 2025 11:49:51 -0800 Subject: [PATCH 180/210] chore(main): release 1.4.0 (#315) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/db-dtypes/CHANGELOG.md | 8 ++++++++ packages/db-dtypes/db_dtypes/version.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/packages/db-dtypes/CHANGELOG.md b/packages/db-dtypes/CHANGELOG.md index fd4ce0297e79..7700d5aac48b 100644 --- a/packages/db-dtypes/CHANGELOG.md +++ b/packages/db-dtypes/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [1.4.0](https://github.com/googleapis/python-db-dtypes-pandas/compare/v1.3.1...v1.4.0) (2025-01-21) + + +### Features + +* Add __hash__ property for JSONArrowType ([#316](https://github.com/googleapis/python-db-dtypes-pandas/issues/316)) ([7073e37](https://github.com/googleapis/python-db-dtypes-pandas/commit/7073e37d1fe76c2078550a8c7f0e45e3fad26809)) +* Add Arrow types for efficient JSON data representation in pyarrow ([#312](https://github.com/googleapis/python-db-dtypes-pandas/issues/312)) ([d9992fc](https://github.com/googleapis/python-db-dtypes-pandas/commit/d9992fc6120351cb8ccb2dd86bd57e8097004285)) + ## [1.3.1](https://github.com/googleapis/python-db-dtypes-pandas/compare/v1.3.0...v1.3.1) (2024-11-12) diff --git a/packages/db-dtypes/db_dtypes/version.py b/packages/db-dtypes/db_dtypes/version.py index 081cecee1938..2612bd752670 100644 --- a/packages/db-dtypes/db_dtypes/version.py +++ b/packages/db-dtypes/db_dtypes/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "1.3.1" +__version__ = "1.4.0" From 9dceb6ad947acdc6f1b2ae6e0e2b94655bfc1d8e Mon Sep 17 00:00:00 2001 From: Chelsea Lin Date: Wed, 29 Jan 2025 14:33:30 -0800 Subject: [PATCH 181/210] chore: limit pyarrow version in unit-prerelease tests (#320) --- packages/db-dtypes/noxfile.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/packages/db-dtypes/noxfile.py b/packages/db-dtypes/noxfile.py index c487cd79a6ec..bf5b3d2cf561 100644 --- a/packages/db-dtypes/noxfile.py +++ b/packages/db-dtypes/noxfile.py @@ -199,7 +199,10 @@ def prerelease(session, tests_path): "--prefer-binary", "--pre", "--upgrade", - "pyarrow", + # Limit pyarrow to versions prior to 20.0.0.dev19 to prevent a RuntimeWarning + # during import. This workaround can be removed once the underlying issue + # in pyarrow is resolved (see: https://github.com/apache/arrow/issues/45380). + "pyarrow<=20.0.0.dev18", ) # Avoid pandas==2.2.0rc0 as this version causes PyArrow to fail. Once newer # prerelease comes out, this constraint can be removed. 
See From 40844e93d5a33b4ce80da7d2ce03444f90be344a Mon Sep 17 00:00:00 2001 From: Chelsea Lin Date: Thu, 30 Jan 2025 13:24:54 -0800 Subject: [PATCH 182/210] fix: Re-add ModuleNotFoundError handler for pandas_backports (#319) * fix: bring back ModuleNotFoundError handler in pandas_backports.py * add unit-test for import_default functions --- .../db-dtypes/db_dtypes/pandas_backports.py | 5 ++- .../tests/unit/test_pandas_backports.py | 37 +++++++++++++++++++ 2 files changed, 41 insertions(+), 1 deletion(-) create mode 100644 packages/db-dtypes/tests/unit/test_pandas_backports.py diff --git a/packages/db-dtypes/db_dtypes/pandas_backports.py b/packages/db-dtypes/db_dtypes/pandas_backports.py index 8112c549d2ce..f8009ea59dfc 100644 --- a/packages/db-dtypes/db_dtypes/pandas_backports.py +++ b/packages/db-dtypes/db_dtypes/pandas_backports.py @@ -62,7 +62,10 @@ def import_default(module_name, force=False, default=None): return default name = default.__name__ - module = __import__(module_name, {}, {}, [name]) + try: + module = __import__(module_name, {}, {}, [name]) + except ModuleNotFoundError: + return default return getattr(module, name, default) diff --git a/packages/db-dtypes/tests/unit/test_pandas_backports.py b/packages/db-dtypes/tests/unit/test_pandas_backports.py new file mode 100644 index 000000000000..eb68b6ad2542 --- /dev/null +++ b/packages/db-dtypes/tests/unit/test_pandas_backports.py @@ -0,0 +1,37 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest.mock as mock + +import db_dtypes.pandas_backports as pandas_backports + + +@mock.patch("builtins.__import__") +def test_import_default_module_found(mock_import): + mock_module = mock.MagicMock() + mock_module.OpsMixin = "OpsMixin_from_module" # Simulate successful import + mock_import.return_value = mock_module + + default_class = type("OpsMixin", (), {}) # Dummy class + result = pandas_backports.import_default("module_name", default=default_class) + assert result == "OpsMixin_from_module" + + +@mock.patch("builtins.__import__") +def test_import_default_module_not_found(mock_import): + mock_import.side_effect = ModuleNotFoundError + + default_class = type("OpsMixin", (), {}) # Dummy class + result = pandas_backports.import_default("module_name", default=default_class) + assert result == default_class From dd28eb693a4639a78161af11674aae3e0d98d0a0 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 5 Feb 2025 14:55:02 -0600 Subject: [PATCH 183/210] chore(main): release 1.4.1 (#322) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/db-dtypes/CHANGELOG.md | 7 +++++++ packages/db-dtypes/db_dtypes/version.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/db-dtypes/CHANGELOG.md b/packages/db-dtypes/CHANGELOG.md index 7700d5aac48b..be42c60b652e 100644 --- a/packages/db-dtypes/CHANGELOG.md +++ b/packages/db-dtypes/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.4.1](https://github.com/googleapis/python-db-dtypes-pandas/compare/v1.4.0...v1.4.1) (2025-01-30) + + +### Bug Fixes + +* Re-add ModuleNotFoundError handler for pandas_backports ([#319](https://github.com/googleapis/python-db-dtypes-pandas/issues/319)) 
([931ff8a](https://github.com/googleapis/python-db-dtypes-pandas/commit/931ff8a0f15fb376f77954affb48d1c953094dee)) + ## [1.4.0](https://github.com/googleapis/python-db-dtypes-pandas/compare/v1.3.1...v1.4.0) (2025-01-21) diff --git a/packages/db-dtypes/db_dtypes/version.py b/packages/db-dtypes/db_dtypes/version.py index 2612bd752670..305578023d3a 100644 --- a/packages/db-dtypes/db_dtypes/version.py +++ b/packages/db-dtypes/db_dtypes/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "1.4.0" +__version__ = "1.4.1" From ddab33265311b2c332cd88196ac3a6aa63135c5a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 24 Feb 2025 10:46:49 -0800 Subject: [PATCH 184/210] chore(python): conditionally load credentials in .kokoro/build.sh (#326) Source-Link: https://github.com/googleapis/synthtool/commit/aa69fb74717c8f4c58c60f8cc101d3f4b2c07b09 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:f016446d6e520e5fb552c45b110cba3f217bffdd3d06bdddd076e9e6d13266cf Co-authored-by: Owl Bot --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 4 ++-- packages/db-dtypes/.kokoro/build.sh | 20 ++++++++++++++------ 2 files changed, 16 insertions(+), 8 deletions(-) diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 4c0027ff1c61..3f7634f25f8e 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:04c35dc5f49f0f503a306397d6d043685f8d2bb822ab515818c4208d7fb2db3a -# created: 2025-01-16T15:24:11.364245182Z + digest: sha256:f016446d6e520e5fb552c45b110cba3f217bffdd3d06bdddd076e9e6d13266cf +# created: 2025-02-21T19:32:52.01306189Z diff --git a/packages/db-dtypes/.kokoro/build.sh b/packages/db-dtypes/.kokoro/build.sh index 2731d6f2a78d..d41b45aa1dd0 100755 --- a/packages/db-dtypes/.kokoro/build.sh +++ b/packages/db-dtypes/.kokoro/build.sh @@ -15,11 +15,13 @@ set -eo pipefail +CURRENT_DIR=$(dirname "${BASH_SOURCE[0]}") + if [[ -z "${PROJECT_ROOT:-}" ]]; then - PROJECT_ROOT="github/python-db-dtypes-pandas" + PROJECT_ROOT=$(realpath "${CURRENT_DIR}/..") fi -cd "${PROJECT_ROOT}" +pushd "${PROJECT_ROOT}" # Disable buffering, so that the logs stream through. export PYTHONUNBUFFERED=1 @@ -28,10 +30,16 @@ export PYTHONUNBUFFERED=1 env | grep KOKORO # Setup service account credentials. -export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json +if [[ -f "${KOKORO_GFILE_DIR}/service-account.json" ]] +then + export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json +fi # Setup project id. -export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") +if [[ -f "${KOKORO_GFILE_DIR}/project-id.json" ]] +then + export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") +fi # If this is a continuous build, send the test log to the FlakyBot. # See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. @@ -46,7 +54,7 @@ fi # If NOX_SESSION is set, it only runs the specified session, # otherwise run all the sessions. 
if [[ -n "${NOX_SESSION:-}" ]]; then - python3 -m nox -s ${NOX_SESSION:-} + python3 -m nox -s ${NOX_SESSION:-} else - python3 -m nox + python3 -m nox fi From fb55ff0c21ab21351b37d2d097a19b943f2e3348 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 24 Feb 2025 10:51:59 -0800 Subject: [PATCH 185/210] build(deps): bump cryptography from 43.0.1 to 44.0.1 in /.kokoro (#324) Bumps [cryptography](https://github.com/pyca/cryptography) from 43.0.1 to 44.0.1. - [Changelog](https://github.com/pyca/cryptography/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pyca/cryptography/compare/43.0.1...44.0.1) --- updated-dependencies: - dependency-name: cryptography dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- packages/db-dtypes/.kokoro/requirements.txt | 60 +++++++++++---------- 1 file changed, 32 insertions(+), 28 deletions(-) diff --git a/packages/db-dtypes/.kokoro/requirements.txt b/packages/db-dtypes/.kokoro/requirements.txt index 006d8ef931bf..69b9a7bbe759 100644 --- a/packages/db-dtypes/.kokoro/requirements.txt +++ b/packages/db-dtypes/.kokoro/requirements.txt @@ -112,34 +112,38 @@ colorlog==6.8.2 \ # via # gcp-docuploader # nox -cryptography==43.0.1 \ - --hash=sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494 \ - --hash=sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806 \ - --hash=sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d \ - --hash=sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062 \ - --hash=sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2 \ - --hash=sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4 \ - --hash=sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1 \ - 
--hash=sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85 \ - --hash=sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84 \ - --hash=sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042 \ - --hash=sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d \ - --hash=sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962 \ - --hash=sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2 \ - --hash=sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa \ - --hash=sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d \ - --hash=sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365 \ - --hash=sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96 \ - --hash=sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47 \ - --hash=sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d \ - --hash=sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d \ - --hash=sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c \ - --hash=sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb \ - --hash=sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277 \ - --hash=sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172 \ - --hash=sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034 \ - --hash=sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a \ - --hash=sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289 +cryptography==44.0.1 \ + --hash=sha256:00918d859aa4e57db8299607086f793fa7813ae2ff5a4637e318a25ef82730f7 \ + --hash=sha256:1e8d181e90a777b63f3f0caa836844a1182f1f265687fac2115fcf245f5fbec3 \ + --hash=sha256:1f9a92144fa0c877117e9748c74501bea842f93d21ee00b0cf922846d9d0b183 \ + 
--hash=sha256:21377472ca4ada2906bc313168c9dc7b1d7ca417b63c1c3011d0c74b7de9ae69 \ + --hash=sha256:24979e9f2040c953a94bf3c6782e67795a4c260734e5264dceea65c8f4bae64a \ + --hash=sha256:2a46a89ad3e6176223b632056f321bc7de36b9f9b93b2cc1cccf935a3849dc62 \ + --hash=sha256:322eb03ecc62784536bc173f1483e76747aafeb69c8728df48537eb431cd1911 \ + --hash=sha256:436df4f203482f41aad60ed1813811ac4ab102765ecae7a2bbb1dbb66dcff5a7 \ + --hash=sha256:4f422e8c6a28cf8b7f883eb790695d6d45b0c385a2583073f3cec434cc705e1a \ + --hash=sha256:53f23339864b617a3dfc2b0ac8d5c432625c80014c25caac9082314e9de56f41 \ + --hash=sha256:5fed5cd6102bb4eb843e3315d2bf25fede494509bddadb81e03a859c1bc17b83 \ + --hash=sha256:610a83540765a8d8ce0f351ce42e26e53e1f774a6efb71eb1b41eb01d01c3d12 \ + --hash=sha256:6c8acf6f3d1f47acb2248ec3ea261171a671f3d9428e34ad0357148d492c7864 \ + --hash=sha256:6f76fdd6fd048576a04c5210d53aa04ca34d2ed63336d4abd306d0cbe298fddf \ + --hash=sha256:72198e2b5925155497a5a3e8c216c7fb3e64c16ccee11f0e7da272fa93b35c4c \ + --hash=sha256:887143b9ff6bad2b7570da75a7fe8bbf5f65276365ac259a5d2d5147a73775f2 \ + --hash=sha256:888fcc3fce0c888785a4876ca55f9f43787f4c5c1cc1e2e0da71ad481ff82c5b \ + --hash=sha256:8e6a85a93d0642bd774460a86513c5d9d80b5c002ca9693e63f6e540f1815ed0 \ + --hash=sha256:94f99f2b943b354a5b6307d7e8d19f5c423a794462bde2bf310c770ba052b1c4 \ + --hash=sha256:9b336599e2cb77b1008cb2ac264b290803ec5e8e89d618a5e978ff5eb6f715d9 \ + --hash=sha256:a2d8a7045e1ab9b9f803f0d9531ead85f90c5f2859e653b61497228b18452008 \ + --hash=sha256:b8272f257cf1cbd3f2e120f14c68bff2b6bdfcc157fafdee84a1b795efd72862 \ + --hash=sha256:bf688f615c29bfe9dfc44312ca470989279f0e94bb9f631f85e3459af8efc009 \ + --hash=sha256:d9c5b9f698a83c8bd71e0f4d3f9f839ef244798e5ffe96febfa9714717db7af7 \ + --hash=sha256:dd7c7e2d71d908dc0f8d2027e1604102140d84b155e658c20e8ad1304317691f \ + --hash=sha256:df978682c1504fc93b3209de21aeabf2375cb1571d4e61907b3e7a2540e83026 \ + --hash=sha256:e403f7f766ded778ecdb790da786b418a9f2394f36e8cc8b796cc056ab05f44f \ + 
--hash=sha256:eb3889330f2a4a148abead555399ec9a32b13b7c8ba969b72d8e500eb7ef84cd \ + --hash=sha256:f4daefc971c2d1f82f03097dc6f216744a6cd2ac0f04c68fb935ea2ba2a0d420 \ + --hash=sha256:f51f5705ab27898afda1aaa430f34ad90dc117421057782022edf0600bec5f14 \ + --hash=sha256:fd0ee90072861e276b0ff08bd627abec29e32a53b2be44e41dbcdf87cbee2b00 # via # -r requirements.in # gcp-releasetool From 64173306748c5997d72e33af1cdcd4f4ffcc31fd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 24 Feb 2025 13:09:12 -0800 Subject: [PATCH 186/210] build(deps): bump jinja2 from 3.1.4 to 3.1.5 in /.kokoro (#310) * build(deps): bump jinja2 from 3.1.4 to 3.1.5 in /.kokoro Bumps [jinja2](https://github.com/pallets/jinja) from 3.1.4 to 3.1.5. - [Release notes](https://github.com/pallets/jinja/releases) - [Changelog](https://github.com/pallets/jinja/blob/main/CHANGES.rst) - [Commits](https://github.com/pallets/jinja/compare/3.1.4...3.1.5) --- updated-dependencies: - dependency-name: jinja2 dependency-type: indirect ... Signed-off-by: dependabot[bot] * pin ubuntu version to 22.04 --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Lingqing Gan --- packages/db-dtypes/.github/workflows/unittest.yml | 5 ++++- packages/db-dtypes/.kokoro/requirements.txt | 6 +++--- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/packages/db-dtypes/.github/workflows/unittest.yml b/packages/db-dtypes/.github/workflows/unittest.yml index 2386b3aa010b..d919b44ca951 100644 --- a/packages/db-dtypes/.github/workflows/unittest.yml +++ b/packages/db-dtypes/.github/workflows/unittest.yml @@ -5,7 +5,10 @@ on: name: unittest jobs: unit: - runs-on: ubuntu-latest + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2303): use `ubuntu-latest` once this bug is fixed. 
+ # Use ubuntu-22.04 until Python 3.7 is removed from the test matrix + # https://docs.github.com/en/actions/using-github-hosted-runners/using-github-hosted-runners/about-github-hosted-runners#standard-github-hosted-runners-for-public-repositories + runs-on: ubuntu-22.04 strategy: matrix: python: ['3.8', '3.9', '3.10', '3.11', '3.12'] diff --git a/packages/db-dtypes/.kokoro/requirements.txt b/packages/db-dtypes/.kokoro/requirements.txt index 69b9a7bbe759..6ad95a04a419 100644 --- a/packages/db-dtypes/.kokoro/requirements.txt +++ b/packages/db-dtypes/.kokoro/requirements.txt @@ -258,9 +258,9 @@ jeepney==0.8.0 \ # via # keyring # secretstorage -jinja2==3.1.4 \ - --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \ - --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d +jinja2==3.1.5 \ + --hash=sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb \ + --hash=sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb # via gcp-releasetool keyring==25.4.1 \ --hash=sha256:5426f817cf7f6f007ba5ec722b1bcad95a75b27d780343772ad76b17cb47b0bf \ From 328c3e6014b548732d7c211f40ec7b56f75fa73e Mon Sep 17 00:00:00 2001 From: Lingqing Gan Date: Thu, 27 Feb 2025 05:00:38 -0800 Subject: [PATCH 187/210] test: remove pyarrow prerelease pin (#327) The referred upstream issue has been fixed. 
See: https://togithub.com/apache/arrow/issues/45380 fixes #321 --- packages/db-dtypes/db_dtypes/json.py | 6 ++++-- packages/db-dtypes/noxfile.py | 5 +---- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/packages/db-dtypes/db_dtypes/json.py b/packages/db-dtypes/db_dtypes/json.py index 99e0c67d86cb..835d6387bbc3 100644 --- a/packages/db-dtypes/db_dtypes/json.py +++ b/packages/db-dtypes/db_dtypes/json.py @@ -257,8 +257,10 @@ def __array__(self, dtype=None, copy: bool | None = None) -> np.ndarray: class JSONArrowScalar(pa.ExtensionScalar): - def as_py(self): - return JSONArray._deserialize_json(self.value.as_py() if self.value else None) + def as_py(self, **kwargs): + return JSONArray._deserialize_json( + self.value.as_py(**kwargs) if self.value else None + ) class JSONArrowType(pa.ExtensionType): diff --git a/packages/db-dtypes/noxfile.py b/packages/db-dtypes/noxfile.py index bf5b3d2cf561..c487cd79a6ec 100644 --- a/packages/db-dtypes/noxfile.py +++ b/packages/db-dtypes/noxfile.py @@ -199,10 +199,7 @@ def prerelease(session, tests_path): "--prefer-binary", "--pre", "--upgrade", - # Limit pyarrow to versions prior to 20.0.0.dev19 to prevent a RuntimeWarning - # during import. This workaround can be removed once the underlying issue - # in pyarrow is resolved (see: https://github.com/apache/arrow/issues/45380). - "pyarrow<=20.0.0.dev18", + "pyarrow", ) # Avoid pandas==2.2.0rc0 as this version causes PyArrow to fail. Once newer # prerelease comes out, this constraint can be removed. 
See From fd03badb6b96e850a5bc50d1e804e4c84ba9fa85 Mon Sep 17 00:00:00 2001 From: Chelsea Lin Date: Tue, 4 Mar 2025 15:02:19 -0800 Subject: [PATCH 188/210] fix: remove unbox json functionality from JSONArrowType (#325) * fix: remove unbox json functionality from JSONArrowType * lint --- packages/db-dtypes/db_dtypes/__init__.py | 3 +- packages/db-dtypes/db_dtypes/json.py | 10 ---- packages/db-dtypes/tests/unit/test_json.py | 70 +++++++++------------- 3 files changed, 28 insertions(+), 55 deletions(-) diff --git a/packages/db-dtypes/db_dtypes/__init__.py b/packages/db-dtypes/db_dtypes/__init__.py index d5b05dc8eaff..2424ff43c636 100644 --- a/packages/db-dtypes/db_dtypes/__init__.py +++ b/packages/db-dtypes/db_dtypes/__init__.py @@ -50,7 +50,7 @@ # To use JSONArray and JSONDtype, you'll need Pandas 1.5.0 or later. With the removal # of Python 3.7 compatibility, the minimum Pandas version will be updated to 1.5.0. if packaging.version.Version(pandas.__version__) >= packaging.version.Version("1.5.0"): - from db_dtypes.json import JSONArray, JSONArrowScalar, JSONArrowType, JSONDtype + from db_dtypes.json import JSONArray, JSONArrowType, JSONDtype else: JSONArray = None JSONDtype = None @@ -375,7 +375,6 @@ def __sub__(self, other): "JSONDtype", "JSONArray", "JSONArrowType", - "JSONArrowScalar", "TimeArray", "TimeDtype", ] diff --git a/packages/db-dtypes/db_dtypes/json.py b/packages/db-dtypes/db_dtypes/json.py index 835d6387bbc3..37aad83da80c 100644 --- a/packages/db-dtypes/db_dtypes/json.py +++ b/packages/db-dtypes/db_dtypes/json.py @@ -256,13 +256,6 @@ def __array__(self, dtype=None, copy: bool | None = None) -> np.ndarray: return result -class JSONArrowScalar(pa.ExtensionScalar): - def as_py(self, **kwargs): - return JSONArray._deserialize_json( - self.value.as_py(**kwargs) if self.value else None - ) - - class JSONArrowType(pa.ExtensionType): """Arrow extension type for the `dbjson` Pandas extension type.""" @@ -282,9 +275,6 @@ def __hash__(self) -> int: def 
to_pandas_dtype(self): return JSONDtype() - def __arrow_ext_scalar_class__(self): - return JSONArrowScalar - # Register the type to be included in RecordBatches, sent over IPC and received in # another Python process. diff --git a/packages/db-dtypes/tests/unit/test_json.py b/packages/db-dtypes/tests/unit/test_json.py index ff2c86748799..d15cfc768011 100644 --- a/packages/db-dtypes/tests/unit/test_json.py +++ b/packages/db-dtypes/tests/unit/test_json.py @@ -13,7 +13,6 @@ # limitations under the License. import json -import math import numpy as np import pandas as pd @@ -160,20 +159,15 @@ def test_json_arrow_to_pandas(): s = arr.to_pandas() assert isinstance(s.dtypes, db_dtypes.JSONDtype) assert s[0] - assert s[1] == 100 - assert math.isclose(s[2], 0.98) - assert s[3] == "hello world" - assert math.isclose(s[4][0], 0.1) - assert math.isclose(s[4][1], 0.2) - assert s[5] == { - "null_field": None, - "order": { - "items": ["book", "pen", "computer"], - "total": 15, - "address": {"street": "123 Main St", "city": "Anytown"}, - }, - } - assert pd.isna(s[6]) + assert s[1] == "100" + assert s[2] == "0.98" + assert s[3] == '"hello world"' + assert s[4] == "[0.1,0.2]" + assert ( + s[5] + == '{"null_field":null,"order":{"address":{"city":"Anytown","street":"123 Main St"},"items":["book","pen","computer"],"total":15}}' + ) + assert s[6] == "null" def test_json_arrow_to_pylist(): @@ -186,20 +180,15 @@ def test_json_arrow_to_pylist(): s = arr.to_pylist() assert isinstance(s, list) assert s[0] - assert s[1] == 100 - assert math.isclose(s[2], 0.98) - assert s[3] == "hello world" - assert math.isclose(s[4][0], 0.1) - assert math.isclose(s[4][1], 0.2) - assert s[5] == { - "null_field": None, - "order": { - "items": ["book", "pen", "computer"], - "total": 15, - "address": {"street": "123 Main St", "city": "Anytown"}, - }, - } - assert s[6] is None + assert s[1] == "100" + assert s[2] == "0.98" + assert s[3] == '"hello world"' + assert s[4] == "[0.1,0.2]" + assert ( + s[5] + == 
'{"null_field":null,"order":{"address":{"city":"Anytown","street":"123 Main St"},"items":["book","pen","computer"],"total":15}}' + ) + assert s[6] == "null" def test_json_arrow_record_batch(): @@ -226,17 +215,12 @@ def test_json_arrow_record_batch(): assert isinstance(s, list) assert s[0] - assert s[1] == 100 - assert math.isclose(s[2], 0.98) - assert s[3] == "hello world" - assert math.isclose(s[4][0], 0.1) - assert math.isclose(s[4][1], 0.2) - assert s[5] == { - "null_field": None, - "order": { - "items": ["book", "pen", "computer"], - "total": 15, - "address": {"street": "123 Main St", "city": "Anytown"}, - }, - } - assert s[6] is None + assert s[1] == "100" + assert s[2] == "0.98" + assert s[3] == '"hello world"' + assert s[4] == "[0.1,0.2]" + assert ( + s[5] + == '{"null_field":null,"order":{"address":{"city":"Anytown","street":"123 Main St"},"items":["book","pen","computer"],"total":15}}' + ) + assert s[6] == "null" From 080c989f407bc177ef7067ae7789a130e69b7c5a Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 5 Mar 2025 15:38:18 -0800 Subject: [PATCH 189/210] chore(main): release 1.4.2 (#328) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/db-dtypes/CHANGELOG.md | 7 +++++++ packages/db-dtypes/db_dtypes/version.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/db-dtypes/CHANGELOG.md b/packages/db-dtypes/CHANGELOG.md index be42c60b652e..19fe645618a4 100644 --- a/packages/db-dtypes/CHANGELOG.md +++ b/packages/db-dtypes/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.4.2](https://github.com/googleapis/python-db-dtypes-pandas/compare/v1.4.1...v1.4.2) (2025-03-04) + + +### Bug Fixes + +* Remove unbox json functionality from JSONArrowType ([#325](https://github.com/googleapis/python-db-dtypes-pandas/issues/325)) 
([60deef1](https://github.com/googleapis/python-db-dtypes-pandas/commit/60deef1636ba3e4f88725db8b9ce23b634168ac2)) + ## [1.4.1](https://github.com/googleapis/python-db-dtypes-pandas/compare/v1.4.0...v1.4.1) (2025-01-30) diff --git a/packages/db-dtypes/db_dtypes/version.py b/packages/db-dtypes/db_dtypes/version.py index 305578023d3a..c97e3cac4ab7 100644 --- a/packages/db-dtypes/db_dtypes/version.py +++ b/packages/db-dtypes/db_dtypes/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "1.4.1" +__version__ = "1.4.2" From e8bd09bee0247255609f7d5c8ef23676f6b851ac Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 6 Mar 2025 12:51:40 -0500 Subject: [PATCH 190/210] chore: remove unused files (#329) --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 4 +- .../db-dtypes/.kokoro/docker/docs/Dockerfile | 89 --- .../.kokoro/docker/docs/requirements.in | 2 - .../.kokoro/docker/docs/requirements.txt | 297 ---------- packages/db-dtypes/.kokoro/docs/common.cfg | 67 --- .../db-dtypes/.kokoro/docs/docs-presubmit.cfg | 28 - packages/db-dtypes/.kokoro/docs/docs.cfg | 1 - packages/db-dtypes/.kokoro/publish-docs.sh | 58 -- packages/db-dtypes/.kokoro/release.sh | 29 - packages/db-dtypes/.kokoro/release/common.cfg | 43 -- .../db-dtypes/.kokoro/release/release.cfg | 1 - packages/db-dtypes/.kokoro/requirements.in | 11 - packages/db-dtypes/.kokoro/requirements.txt | 513 ------------------ 13 files changed, 2 insertions(+), 1141 deletions(-) delete mode 100644 packages/db-dtypes/.kokoro/docker/docs/Dockerfile delete mode 100644 packages/db-dtypes/.kokoro/docker/docs/requirements.in delete mode 100644 packages/db-dtypes/.kokoro/docker/docs/requirements.txt delete mode 100644 packages/db-dtypes/.kokoro/docs/common.cfg delete mode 100644 packages/db-dtypes/.kokoro/docs/docs-presubmit.cfg delete mode 100644 packages/db-dtypes/.kokoro/docs/docs.cfg delete mode 100755 
packages/db-dtypes/.kokoro/publish-docs.sh delete mode 100755 packages/db-dtypes/.kokoro/release.sh delete mode 100644 packages/db-dtypes/.kokoro/release/common.cfg delete mode 100644 packages/db-dtypes/.kokoro/release/release.cfg delete mode 100644 packages/db-dtypes/.kokoro/requirements.in delete mode 100644 packages/db-dtypes/.kokoro/requirements.txt diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml index 3f7634f25f8e..c631e1f7d7e9 100644 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ b/packages/db-dtypes/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:f016446d6e520e5fb552c45b110cba3f217bffdd3d06bdddd076e9e6d13266cf -# created: 2025-02-21T19:32:52.01306189Z + digest: sha256:5581906b957284864632cde4e9c51d1cc66b0094990b27e689132fe5cd036046 +# created: 2025-03-05 diff --git a/packages/db-dtypes/.kokoro/docker/docs/Dockerfile b/packages/db-dtypes/.kokoro/docker/docs/Dockerfile deleted file mode 100644 index e5410e296bd8..000000000000 --- a/packages/db-dtypes/.kokoro/docker/docs/Dockerfile +++ /dev/null @@ -1,89 +0,0 @@ -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from ubuntu:24.04 - -ENV DEBIAN_FRONTEND noninteractive - -# Ensure local Python is preferred over distribution Python. -ENV PATH /usr/local/bin:$PATH - -# Install dependencies. 
-RUN apt-get update \ - && apt-get install -y --no-install-recommends \ - apt-transport-https \ - build-essential \ - ca-certificates \ - curl \ - dirmngr \ - git \ - gpg-agent \ - graphviz \ - libbz2-dev \ - libdb5.3-dev \ - libexpat1-dev \ - libffi-dev \ - liblzma-dev \ - libreadline-dev \ - libsnappy-dev \ - libssl-dev \ - libsqlite3-dev \ - portaudio19-dev \ - redis-server \ - software-properties-common \ - ssh \ - sudo \ - tcl \ - tcl-dev \ - tk \ - tk-dev \ - uuid-dev \ - wget \ - zlib1g-dev \ - && add-apt-repository universe \ - && apt-get update \ - && apt-get -y install jq \ - && apt-get clean autoclean \ - && apt-get autoremove -y \ - && rm -rf /var/lib/apt/lists/* \ - && rm -f /var/cache/apt/archives/*.deb - - -###################### Install python 3.10.14 for docs/docfx session - -# Download python 3.10.14 -RUN wget https://www.python.org/ftp/python/3.10.14/Python-3.10.14.tgz - -# Extract files -RUN tar -xvf Python-3.10.14.tgz - -# Install python 3.10.14 -RUN ./Python-3.10.14/configure --enable-optimizations -RUN make altinstall - -ENV PATH /usr/local/bin/python3.10:$PATH - -###################### Install pip -RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ - && python3.10 /tmp/get-pip.py \ - && rm /tmp/get-pip.py - -# Test pip -RUN python3.10 -m pip - -# Install build requirements -COPY requirements.txt /requirements.txt -RUN python3.10 -m pip install --require-hashes -r requirements.txt - -CMD ["python3.10"] diff --git a/packages/db-dtypes/.kokoro/docker/docs/requirements.in b/packages/db-dtypes/.kokoro/docker/docs/requirements.in deleted file mode 100644 index 586bd07037ae..000000000000 --- a/packages/db-dtypes/.kokoro/docker/docs/requirements.in +++ /dev/null @@ -1,2 +0,0 @@ -nox -gcp-docuploader diff --git a/packages/db-dtypes/.kokoro/docker/docs/requirements.txt b/packages/db-dtypes/.kokoro/docker/docs/requirements.txt deleted file mode 100644 index a9360a25b707..000000000000 --- 
a/packages/db-dtypes/.kokoro/docker/docs/requirements.txt +++ /dev/null @@ -1,297 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.10 -# by the following command: -# -# pip-compile --allow-unsafe --generate-hashes requirements.in -# -argcomplete==3.5.3 \ - --hash=sha256:2ab2c4a215c59fd6caaff41a869480a23e8f6a5f910b266c1808037f4e375b61 \ - --hash=sha256:c12bf50eded8aebb298c7b7da7a5ff3ee24dffd9f5281867dfe1424b58c55392 - # via nox -cachetools==5.5.0 \ - --hash=sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 \ - --hash=sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a - # via google-auth -certifi==2024.12.14 \ - --hash=sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56 \ - --hash=sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db - # via requests -charset-normalizer==3.4.1 \ - --hash=sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537 \ - --hash=sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa \ - --hash=sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a \ - --hash=sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294 \ - --hash=sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b \ - --hash=sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd \ - --hash=sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601 \ - --hash=sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd \ - --hash=sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4 \ - --hash=sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d \ - --hash=sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2 \ - --hash=sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313 \ - --hash=sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd \ - 
--hash=sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa \ - --hash=sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8 \ - --hash=sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1 \ - --hash=sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2 \ - --hash=sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496 \ - --hash=sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d \ - --hash=sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b \ - --hash=sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e \ - --hash=sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a \ - --hash=sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4 \ - --hash=sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca \ - --hash=sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78 \ - --hash=sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408 \ - --hash=sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5 \ - --hash=sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3 \ - --hash=sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f \ - --hash=sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a \ - --hash=sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765 \ - --hash=sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6 \ - --hash=sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146 \ - --hash=sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6 \ - --hash=sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9 \ - --hash=sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd \ - --hash=sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c \ - 
--hash=sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f \ - --hash=sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545 \ - --hash=sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176 \ - --hash=sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770 \ - --hash=sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824 \ - --hash=sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f \ - --hash=sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf \ - --hash=sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487 \ - --hash=sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d \ - --hash=sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd \ - --hash=sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b \ - --hash=sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534 \ - --hash=sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f \ - --hash=sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b \ - --hash=sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9 \ - --hash=sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd \ - --hash=sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125 \ - --hash=sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9 \ - --hash=sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de \ - --hash=sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11 \ - --hash=sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d \ - --hash=sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35 \ - --hash=sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f \ - --hash=sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda \ - 
--hash=sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7 \ - --hash=sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a \ - --hash=sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971 \ - --hash=sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8 \ - --hash=sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41 \ - --hash=sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d \ - --hash=sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f \ - --hash=sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757 \ - --hash=sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a \ - --hash=sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886 \ - --hash=sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77 \ - --hash=sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76 \ - --hash=sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247 \ - --hash=sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85 \ - --hash=sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb \ - --hash=sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7 \ - --hash=sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e \ - --hash=sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6 \ - --hash=sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037 \ - --hash=sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1 \ - --hash=sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e \ - --hash=sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807 \ - --hash=sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407 \ - --hash=sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c \ - 
--hash=sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12 \ - --hash=sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3 \ - --hash=sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089 \ - --hash=sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd \ - --hash=sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e \ - --hash=sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00 \ - --hash=sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616 - # via requests -click==8.1.8 \ - --hash=sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2 \ - --hash=sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a - # via gcp-docuploader -colorlog==6.9.0 \ - --hash=sha256:5906e71acd67cb07a71e779c47c4bcb45fb8c2993eebe9e5adcd6a6f1b283eff \ - --hash=sha256:bfba54a1b93b94f54e1f4fe48395725a3d92fd2a4af702f6bd70946bdc0c6ac2 - # via - # gcp-docuploader - # nox -distlib==0.3.9 \ - --hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \ - --hash=sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403 - # via virtualenv -filelock==3.16.1 \ - --hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \ - --hash=sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435 - # via virtualenv -gcp-docuploader==0.6.5 \ - --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ - --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea - # via -r requirements.in -google-api-core==2.24.0 \ - --hash=sha256:10d82ac0fca69c82a25b3efdeefccf6f28e02ebb97925a8cce8edbfe379929d9 \ - --hash=sha256:e255640547a597a4da010876d333208ddac417d60add22b6851a0c66a831fcaf - # via - # google-cloud-core - # google-cloud-storage -google-auth==2.37.0 \ - --hash=sha256:0054623abf1f9c83492c63d3f47e77f0a544caa3d40b2d98e099a611c2dd5d00 \ - 
--hash=sha256:42664f18290a6be591be5329a96fe30184be1a1badb7292a7f686a9659de9ca0 - # via - # google-api-core - # google-cloud-core - # google-cloud-storage -google-cloud-core==2.4.1 \ - --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \ - --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61 - # via google-cloud-storage -google-cloud-storage==2.19.0 \ - --hash=sha256:aeb971b5c29cf8ab98445082cbfe7b161a1f48ed275822f59ed3f1524ea54fba \ - --hash=sha256:cd05e9e7191ba6cb68934d8eb76054d9be4562aa89dbc4236feee4d7d51342b2 - # via gcp-docuploader -google-crc32c==1.6.0 \ - --hash=sha256:05e2d8c9a2f853ff116db9706b4a27350587f341eda835f46db3c0a8c8ce2f24 \ - --hash=sha256:18e311c64008f1f1379158158bb3f0c8d72635b9eb4f9545f8cf990c5668e59d \ - --hash=sha256:236c87a46cdf06384f614e9092b82c05f81bd34b80248021f729396a78e55d7e \ - --hash=sha256:35834855408429cecf495cac67ccbab802de269e948e27478b1e47dfb6465e57 \ - --hash=sha256:386122eeaaa76951a8196310432c5b0ef3b53590ef4c317ec7588ec554fec5d2 \ - --hash=sha256:40b05ab32a5067525670880eb5d169529089a26fe35dce8891127aeddc1950e8 \ - --hash=sha256:48abd62ca76a2cbe034542ed1b6aee851b6f28aaca4e6551b5599b6f3ef175cc \ - --hash=sha256:50cf2a96da226dcbff8671233ecf37bf6e95de98b2a2ebadbfdf455e6d05df42 \ - --hash=sha256:51c4f54dd8c6dfeb58d1df5e4f7f97df8abf17a36626a217f169893d1d7f3e9f \ - --hash=sha256:5bcc90b34df28a4b38653c36bb5ada35671ad105c99cfe915fb5bed7ad6924aa \ - --hash=sha256:62f6d4a29fea082ac4a3c9be5e415218255cf11684ac6ef5488eea0c9132689b \ - --hash=sha256:6eceb6ad197656a1ff49ebfbbfa870678c75be4344feb35ac1edf694309413dc \ - --hash=sha256:7aec8e88a3583515f9e0957fe4f5f6d8d4997e36d0f61624e70469771584c760 \ - --hash=sha256:91ca8145b060679ec9176e6de4f89b07363d6805bd4760631ef254905503598d \ - --hash=sha256:a184243544811e4a50d345838a883733461e67578959ac59964e43cca2c791e7 \ - --hash=sha256:a9e4b426c3702f3cd23b933436487eb34e01e00327fac20c9aebb68ccf34117d \ - 
--hash=sha256:bb0966e1c50d0ef5bc743312cc730b533491d60585a9a08f897274e57c3f70e0 \ - --hash=sha256:bb8b3c75bd157010459b15222c3fd30577042a7060e29d42dabce449c087f2b3 \ - --hash=sha256:bd5e7d2445d1a958c266bfa5d04c39932dc54093fa391736dbfdb0f1929c1fb3 \ - --hash=sha256:c87d98c7c4a69066fd31701c4e10d178a648c2cac3452e62c6b24dc51f9fcc00 \ - --hash=sha256:d2952396dc604544ea7476b33fe87faedc24d666fb0c2d5ac971a2b9576ab871 \ - --hash=sha256:d8797406499f28b5ef791f339594b0b5fdedf54e203b5066675c406ba69d705c \ - --hash=sha256:d9e9913f7bd69e093b81da4535ce27af842e7bf371cde42d1ae9e9bd382dc0e9 \ - --hash=sha256:e2806553238cd076f0a55bddab37a532b53580e699ed8e5606d0de1f856b5205 \ - --hash=sha256:ebab974b1687509e5c973b5c4b8b146683e101e102e17a86bd196ecaa4d099fc \ - --hash=sha256:ed767bf4ba90104c1216b68111613f0d5926fb3780660ea1198fc469af410e9d \ - --hash=sha256:f7a1fc29803712f80879b0806cb83ab24ce62fc8daf0569f2204a0cfd7f68ed4 - # via - # google-cloud-storage - # google-resumable-media -google-resumable-media==2.7.2 \ - --hash=sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa \ - --hash=sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0 - # via google-cloud-storage -googleapis-common-protos==1.66.0 \ - --hash=sha256:c3e7b33d15fdca5374cc0a7346dd92ffa847425cc4ea941d970f13680052ec8c \ - --hash=sha256:d7abcd75fabb2e0ec9f74466401f6c119a0b498e27370e9be4c94cb7e382b8ed - # via google-api-core -idna==3.10 \ - --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ - --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 - # via requests -nox==2024.10.9 \ - --hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \ - --hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95 - # via -r requirements.in -packaging==24.2 \ - --hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \ - 
--hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f - # via nox -platformdirs==4.3.6 \ - --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ - --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb - # via virtualenv -proto-plus==1.25.0 \ - --hash=sha256:c91fc4a65074ade8e458e95ef8bac34d4008daa7cce4a12d6707066fca648961 \ - --hash=sha256:fbb17f57f7bd05a68b7707e745e26528b0b3c34e378db91eef93912c54982d91 - # via google-api-core -protobuf==5.29.3 \ - --hash=sha256:0a18ed4a24198528f2333802eb075e59dea9d679ab7a6c5efb017a59004d849f \ - --hash=sha256:0eb32bfa5219fc8d4111803e9a690658aa2e6366384fd0851064b963b6d1f2a7 \ - --hash=sha256:3ea51771449e1035f26069c4c7fd51fba990d07bc55ba80701c78f886bf9c888 \ - --hash=sha256:5da0f41edaf117bde316404bad1a486cb4ededf8e4a54891296f648e8e076620 \ - --hash=sha256:6ce8cc3389a20693bfde6c6562e03474c40851b44975c9b2bf6df7d8c4f864da \ - --hash=sha256:84a57163a0ccef3f96e4b6a20516cedcf5bb3a95a657131c5c3ac62200d23252 \ - --hash=sha256:a4fa6f80816a9a0678429e84973f2f98cbc218cca434abe8db2ad0bffc98503a \ - --hash=sha256:a8434404bbf139aa9e1300dbf989667a83d42ddda9153d8ab76e0d5dcaca484e \ - --hash=sha256:b89c115d877892a512f79a8114564fb435943b59067615894c3b13cd3e1fa107 \ - --hash=sha256:c027e08a08be10b67c06bf2370b99c811c466398c357e615ca88c91c07f0910f \ - --hash=sha256:daaf63f70f25e8689c072cfad4334ca0ac1d1e05a92fc15c54eb9cf23c3efd84 - # via - # gcp-docuploader - # google-api-core - # googleapis-common-protos - # proto-plus -pyasn1==0.6.1 \ - --hash=sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629 \ - --hash=sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034 - # via - # pyasn1-modules - # rsa -pyasn1-modules==0.4.1 \ - --hash=sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd \ - --hash=sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c - # via google-auth -requests==2.32.3 \ - 
--hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ - --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 - # via - # google-api-core - # google-cloud-storage -rsa==4.9 \ - --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ - --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 - # via google-auth -six==1.17.0 \ - --hash=sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274 \ - --hash=sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81 - # via gcp-docuploader -tomli==2.2.1 \ - --hash=sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6 \ - --hash=sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd \ - --hash=sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c \ - --hash=sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b \ - --hash=sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8 \ - --hash=sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6 \ - --hash=sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77 \ - --hash=sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff \ - --hash=sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea \ - --hash=sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192 \ - --hash=sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249 \ - --hash=sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee \ - --hash=sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4 \ - --hash=sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98 \ - --hash=sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8 \ - --hash=sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4 \ - 
--hash=sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281 \ - --hash=sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744 \ - --hash=sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69 \ - --hash=sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13 \ - --hash=sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140 \ - --hash=sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e \ - --hash=sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e \ - --hash=sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc \ - --hash=sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff \ - --hash=sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec \ - --hash=sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2 \ - --hash=sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222 \ - --hash=sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106 \ - --hash=sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272 \ - --hash=sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a \ - --hash=sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7 - # via nox -urllib3==2.3.0 \ - --hash=sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df \ - --hash=sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d - # via requests -virtualenv==20.28.1 \ - --hash=sha256:412773c85d4dab0409b83ec36f7a6499e72eaf08c80e81e9576bca61831c71cb \ - --hash=sha256:5d34ab240fdb5d21549b76f9e8ff3af28252f5499fb6d6f031adac4e5a8c5329 - # via nox diff --git a/packages/db-dtypes/.kokoro/docs/common.cfg b/packages/db-dtypes/.kokoro/docs/common.cfg deleted file mode 100644 index 5d4f68dbfa7e..000000000000 --- a/packages/db-dtypes/.kokoro/docs/common.cfg +++ /dev/null @@ -1,67 +0,0 @@ -# Format: 
//devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "python-db-dtypes-pandas/.kokoro/trampoline_v2.sh" - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-lib-docs" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-db-dtypes-pandas/.kokoro/publish-docs.sh" -} - -env_vars: { - key: "STAGING_BUCKET" - value: "docs-staging" -} - -env_vars: { - key: "V2_STAGING_BUCKET" - # Push non-cloud library docs to `docs-staging-v2-dev` instead of the - # Cloud RAD bucket `docs-staging-v2` - value: "docs-staging-v2-dev" -} - -# It will upload the docker image after successful builds. -env_vars: { - key: "TRAMPOLINE_IMAGE_UPLOAD" - value: "true" -} - -# It will always build the docker image. 
-env_vars: { - key: "TRAMPOLINE_DOCKERFILE" - value: ".kokoro/docker/docs/Dockerfile" -} - -# Fetch the token needed for reporting release status to GitHub -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "yoshi-automation-github-key" - } - } -} - -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "docuploader_service_account" - } - } -} diff --git a/packages/db-dtypes/.kokoro/docs/docs-presubmit.cfg b/packages/db-dtypes/.kokoro/docs/docs-presubmit.cfg deleted file mode 100644 index 05d5574fc945..000000000000 --- a/packages/db-dtypes/.kokoro/docs/docs-presubmit.cfg +++ /dev/null @@ -1,28 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "STAGING_BUCKET" - value: "gcloud-python-test" -} - -env_vars: { - key: "V2_STAGING_BUCKET" - value: "gcloud-python-test" -} - -# We only upload the image in the main `docs` build. -env_vars: { - key: "TRAMPOLINE_IMAGE_UPLOAD" - value: "false" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-db-dtypes-pandas/.kokoro/build.sh" -} - -# Only run this nox session. -env_vars: { - key: "NOX_SESSION" - value: "docs docfx" -} diff --git a/packages/db-dtypes/.kokoro/docs/docs.cfg b/packages/db-dtypes/.kokoro/docs/docs.cfg deleted file mode 100644 index 8f43917d92fe..000000000000 --- a/packages/db-dtypes/.kokoro/docs/docs.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/publish-docs.sh b/packages/db-dtypes/.kokoro/publish-docs.sh deleted file mode 100755 index 4ed4aaf1346f..000000000000 --- a/packages/db-dtypes/.kokoro/publish-docs.sh +++ /dev/null @@ -1,58 +0,0 @@ -#!/bin/bash -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -# Disable buffering, so that the logs stream through. -export PYTHONUNBUFFERED=1 - -export PATH="${HOME}/.local/bin:${PATH}" - -# build docs -nox -s docs - -# create metadata -python3.10 -m docuploader create-metadata \ - --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ - --version=$(python3.10 setup.py --version) \ - --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ - --distribution-name=$(python3.10 setup.py --name) \ - --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ - --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ - --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) - -cat docs.metadata - -# upload docs -python3.10 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}" - - -# docfx yaml files -nox -s docfx - -# create metadata. 
-python3.10 -m docuploader create-metadata \ - --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ - --version=$(python3.10 setup.py --version) \ - --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ - --distribution-name=$(python3.10 setup.py --name) \ - --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ - --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ - --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) - -cat docs.metadata - -# upload docs -python3.10 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}" diff --git a/packages/db-dtypes/.kokoro/release.sh b/packages/db-dtypes/.kokoro/release.sh deleted file mode 100755 index 6e5aac844acc..000000000000 --- a/packages/db-dtypes/.kokoro/release.sh +++ /dev/null @@ -1,29 +0,0 @@ -#!/bin/bash -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -# Start the releasetool reporter -python3 -m pip install --require-hashes -r github/python-db-dtypes-pandas/.kokoro/requirements.txt -python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script - -# Disable buffering, so that the logs stream through. -export PYTHONUNBUFFERED=1 - -# Move into the package, build the distribution and upload. 
-TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-3") -cd github/python-db-dtypes-pandas -python3 setup.py sdist bdist_wheel -twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/packages/db-dtypes/.kokoro/release/common.cfg b/packages/db-dtypes/.kokoro/release/common.cfg deleted file mode 100644 index 7b628ca09880..000000000000 --- a/packages/db-dtypes/.kokoro/release/common.cfg +++ /dev/null @@ -1,43 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "python-db-dtypes-pandas/.kokoro/trampoline.sh" - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-db-dtypes-pandas/.kokoro/release.sh" -} - -# Fetch PyPI password -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "google-cloud-pypi-token-keystore-3" - } - } -} - -# Store the packages we uploaded to PyPI. That way, we have a record of exactly -# what we published, which we can use to generate SBOMs and attestations. 
-action { - define_artifacts { - regex: "github/python-db-dtypes-pandas/**/*.tar.gz" - strip_prefix: "github/python-db-dtypes-pandas" - } -} diff --git a/packages/db-dtypes/.kokoro/release/release.cfg b/packages/db-dtypes/.kokoro/release/release.cfg deleted file mode 100644 index 8f43917d92fe..000000000000 --- a/packages/db-dtypes/.kokoro/release/release.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/requirements.in b/packages/db-dtypes/.kokoro/requirements.in deleted file mode 100644 index fff4d9ce0d0a..000000000000 --- a/packages/db-dtypes/.kokoro/requirements.in +++ /dev/null @@ -1,11 +0,0 @@ -gcp-docuploader -gcp-releasetool>=2 # required for compatibility with cryptography>=42.x -importlib-metadata -typing-extensions -twine -wheel -setuptools -nox>=2022.11.21 # required to remove dependency on py -charset-normalizer<3 -click<8.1.0 -cryptography>=42.0.5 diff --git a/packages/db-dtypes/.kokoro/requirements.txt b/packages/db-dtypes/.kokoro/requirements.txt deleted file mode 100644 index 6ad95a04a419..000000000000 --- a/packages/db-dtypes/.kokoro/requirements.txt +++ /dev/null @@ -1,513 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.9 -# by the following command: -# -# pip-compile --allow-unsafe --generate-hashes requirements.in -# -argcomplete==3.5.1 \ - --hash=sha256:1a1d148bdaa3e3b93454900163403df41448a248af01b6e849edc5ac08e6c363 \ - --hash=sha256:eb1ee355aa2557bd3d0145de7b06b2a45b0ce461e1e7813f5d066039ab4177b4 - # via nox -attrs==24.2.0 \ - --hash=sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346 \ - --hash=sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2 - # via gcp-releasetool -backports-tarfile==1.2.0 \ - --hash=sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34 \ - --hash=sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991 - # via 
jaraco-context -cachetools==5.5.0 \ - --hash=sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 \ - --hash=sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a - # via google-auth -certifi==2024.8.30 \ - --hash=sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8 \ - --hash=sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9 - # via requests -cffi==1.17.1 \ - --hash=sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8 \ - --hash=sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2 \ - --hash=sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1 \ - --hash=sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15 \ - --hash=sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36 \ - --hash=sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824 \ - --hash=sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8 \ - --hash=sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36 \ - --hash=sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17 \ - --hash=sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf \ - --hash=sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc \ - --hash=sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3 \ - --hash=sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed \ - --hash=sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702 \ - --hash=sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1 \ - --hash=sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8 \ - --hash=sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903 \ - --hash=sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6 \ - 
--hash=sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d \ - --hash=sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b \ - --hash=sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e \ - --hash=sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be \ - --hash=sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c \ - --hash=sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683 \ - --hash=sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9 \ - --hash=sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c \ - --hash=sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8 \ - --hash=sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1 \ - --hash=sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4 \ - --hash=sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655 \ - --hash=sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67 \ - --hash=sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595 \ - --hash=sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0 \ - --hash=sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65 \ - --hash=sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41 \ - --hash=sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6 \ - --hash=sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401 \ - --hash=sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6 \ - --hash=sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3 \ - --hash=sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16 \ - --hash=sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93 \ - --hash=sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e \ - 
--hash=sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4 \ - --hash=sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964 \ - --hash=sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c \ - --hash=sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576 \ - --hash=sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0 \ - --hash=sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3 \ - --hash=sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662 \ - --hash=sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3 \ - --hash=sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff \ - --hash=sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5 \ - --hash=sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd \ - --hash=sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f \ - --hash=sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5 \ - --hash=sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14 \ - --hash=sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d \ - --hash=sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9 \ - --hash=sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7 \ - --hash=sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382 \ - --hash=sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a \ - --hash=sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e \ - --hash=sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a \ - --hash=sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4 \ - --hash=sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99 \ - --hash=sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87 \ - 
--hash=sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b - # via cryptography -charset-normalizer==2.1.1 \ - --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ - --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f - # via - # -r requirements.in - # requests -click==8.0.4 \ - --hash=sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1 \ - --hash=sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb - # via - # -r requirements.in - # gcp-docuploader - # gcp-releasetool -colorlog==6.8.2 \ - --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ - --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33 - # via - # gcp-docuploader - # nox -cryptography==44.0.1 \ - --hash=sha256:00918d859aa4e57db8299607086f793fa7813ae2ff5a4637e318a25ef82730f7 \ - --hash=sha256:1e8d181e90a777b63f3f0caa836844a1182f1f265687fac2115fcf245f5fbec3 \ - --hash=sha256:1f9a92144fa0c877117e9748c74501bea842f93d21ee00b0cf922846d9d0b183 \ - --hash=sha256:21377472ca4ada2906bc313168c9dc7b1d7ca417b63c1c3011d0c74b7de9ae69 \ - --hash=sha256:24979e9f2040c953a94bf3c6782e67795a4c260734e5264dceea65c8f4bae64a \ - --hash=sha256:2a46a89ad3e6176223b632056f321bc7de36b9f9b93b2cc1cccf935a3849dc62 \ - --hash=sha256:322eb03ecc62784536bc173f1483e76747aafeb69c8728df48537eb431cd1911 \ - --hash=sha256:436df4f203482f41aad60ed1813811ac4ab102765ecae7a2bbb1dbb66dcff5a7 \ - --hash=sha256:4f422e8c6a28cf8b7f883eb790695d6d45b0c385a2583073f3cec434cc705e1a \ - --hash=sha256:53f23339864b617a3dfc2b0ac8d5c432625c80014c25caac9082314e9de56f41 \ - --hash=sha256:5fed5cd6102bb4eb843e3315d2bf25fede494509bddadb81e03a859c1bc17b83 \ - --hash=sha256:610a83540765a8d8ce0f351ce42e26e53e1f774a6efb71eb1b41eb01d01c3d12 \ - --hash=sha256:6c8acf6f3d1f47acb2248ec3ea261171a671f3d9428e34ad0357148d492c7864 \ - --hash=sha256:6f76fdd6fd048576a04c5210d53aa04ca34d2ed63336d4abd306d0cbe298fddf \ - 
--hash=sha256:72198e2b5925155497a5a3e8c216c7fb3e64c16ccee11f0e7da272fa93b35c4c \ - --hash=sha256:887143b9ff6bad2b7570da75a7fe8bbf5f65276365ac259a5d2d5147a73775f2 \ - --hash=sha256:888fcc3fce0c888785a4876ca55f9f43787f4c5c1cc1e2e0da71ad481ff82c5b \ - --hash=sha256:8e6a85a93d0642bd774460a86513c5d9d80b5c002ca9693e63f6e540f1815ed0 \ - --hash=sha256:94f99f2b943b354a5b6307d7e8d19f5c423a794462bde2bf310c770ba052b1c4 \ - --hash=sha256:9b336599e2cb77b1008cb2ac264b290803ec5e8e89d618a5e978ff5eb6f715d9 \ - --hash=sha256:a2d8a7045e1ab9b9f803f0d9531ead85f90c5f2859e653b61497228b18452008 \ - --hash=sha256:b8272f257cf1cbd3f2e120f14c68bff2b6bdfcc157fafdee84a1b795efd72862 \ - --hash=sha256:bf688f615c29bfe9dfc44312ca470989279f0e94bb9f631f85e3459af8efc009 \ - --hash=sha256:d9c5b9f698a83c8bd71e0f4d3f9f839ef244798e5ffe96febfa9714717db7af7 \ - --hash=sha256:dd7c7e2d71d908dc0f8d2027e1604102140d84b155e658c20e8ad1304317691f \ - --hash=sha256:df978682c1504fc93b3209de21aeabf2375cb1571d4e61907b3e7a2540e83026 \ - --hash=sha256:e403f7f766ded778ecdb790da786b418a9f2394f36e8cc8b796cc056ab05f44f \ - --hash=sha256:eb3889330f2a4a148abead555399ec9a32b13b7c8ba969b72d8e500eb7ef84cd \ - --hash=sha256:f4daefc971c2d1f82f03097dc6f216744a6cd2ac0f04c68fb935ea2ba2a0d420 \ - --hash=sha256:f51f5705ab27898afda1aaa430f34ad90dc117421057782022edf0600bec5f14 \ - --hash=sha256:fd0ee90072861e276b0ff08bd627abec29e32a53b2be44e41dbcdf87cbee2b00 - # via - # -r requirements.in - # gcp-releasetool - # secretstorage -distlib==0.3.9 \ - --hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \ - --hash=sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403 - # via virtualenv -docutils==0.21.2 \ - --hash=sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f \ - --hash=sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2 - # via readme-renderer -filelock==3.16.1 \ - --hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \ - 
--hash=sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435 - # via virtualenv -gcp-docuploader==0.6.5 \ - --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ - --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea - # via -r requirements.in -gcp-releasetool==2.1.1 \ - --hash=sha256:25639269f4eae510094f9dbed9894977e1966933211eb155a451deebc3fc0b30 \ - --hash=sha256:845f4ded3d9bfe8cc7fdaad789e83f4ea014affa77785259a7ddac4b243e099e - # via -r requirements.in -google-api-core==2.21.0 \ - --hash=sha256:4a152fd11a9f774ea606388d423b68aa7e6d6a0ffe4c8266f74979613ec09f81 \ - --hash=sha256:6869eacb2a37720380ba5898312af79a4d30b8bca1548fb4093e0697dc4bdf5d - # via - # google-cloud-core - # google-cloud-storage -google-auth==2.35.0 \ - --hash=sha256:25df55f327ef021de8be50bad0dfd4a916ad0de96da86cd05661c9297723ad3f \ - --hash=sha256:f4c64ed4e01e8e8b646ef34c018f8bf3338df0c8e37d8b3bba40e7f574a3278a - # via - # gcp-releasetool - # google-api-core - # google-cloud-core - # google-cloud-storage -google-cloud-core==2.4.1 \ - --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \ - --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61 - # via google-cloud-storage -google-cloud-storage==2.18.2 \ - --hash=sha256:97a4d45c368b7d401ed48c4fdfe86e1e1cb96401c9e199e419d289e2c0370166 \ - --hash=sha256:aaf7acd70cdad9f274d29332673fcab98708d0e1f4dceb5a5356aaef06af4d99 - # via gcp-docuploader -google-crc32c==1.6.0 \ - --hash=sha256:05e2d8c9a2f853ff116db9706b4a27350587f341eda835f46db3c0a8c8ce2f24 \ - --hash=sha256:18e311c64008f1f1379158158bb3f0c8d72635b9eb4f9545f8cf990c5668e59d \ - --hash=sha256:236c87a46cdf06384f614e9092b82c05f81bd34b80248021f729396a78e55d7e \ - --hash=sha256:35834855408429cecf495cac67ccbab802de269e948e27478b1e47dfb6465e57 \ - --hash=sha256:386122eeaaa76951a8196310432c5b0ef3b53590ef4c317ec7588ec554fec5d2 \ - 
--hash=sha256:40b05ab32a5067525670880eb5d169529089a26fe35dce8891127aeddc1950e8 \ - --hash=sha256:48abd62ca76a2cbe034542ed1b6aee851b6f28aaca4e6551b5599b6f3ef175cc \ - --hash=sha256:50cf2a96da226dcbff8671233ecf37bf6e95de98b2a2ebadbfdf455e6d05df42 \ - --hash=sha256:51c4f54dd8c6dfeb58d1df5e4f7f97df8abf17a36626a217f169893d1d7f3e9f \ - --hash=sha256:5bcc90b34df28a4b38653c36bb5ada35671ad105c99cfe915fb5bed7ad6924aa \ - --hash=sha256:62f6d4a29fea082ac4a3c9be5e415218255cf11684ac6ef5488eea0c9132689b \ - --hash=sha256:6eceb6ad197656a1ff49ebfbbfa870678c75be4344feb35ac1edf694309413dc \ - --hash=sha256:7aec8e88a3583515f9e0957fe4f5f6d8d4997e36d0f61624e70469771584c760 \ - --hash=sha256:91ca8145b060679ec9176e6de4f89b07363d6805bd4760631ef254905503598d \ - --hash=sha256:a184243544811e4a50d345838a883733461e67578959ac59964e43cca2c791e7 \ - --hash=sha256:a9e4b426c3702f3cd23b933436487eb34e01e00327fac20c9aebb68ccf34117d \ - --hash=sha256:bb0966e1c50d0ef5bc743312cc730b533491d60585a9a08f897274e57c3f70e0 \ - --hash=sha256:bb8b3c75bd157010459b15222c3fd30577042a7060e29d42dabce449c087f2b3 \ - --hash=sha256:bd5e7d2445d1a958c266bfa5d04c39932dc54093fa391736dbfdb0f1929c1fb3 \ - --hash=sha256:c87d98c7c4a69066fd31701c4e10d178a648c2cac3452e62c6b24dc51f9fcc00 \ - --hash=sha256:d2952396dc604544ea7476b33fe87faedc24d666fb0c2d5ac971a2b9576ab871 \ - --hash=sha256:d8797406499f28b5ef791f339594b0b5fdedf54e203b5066675c406ba69d705c \ - --hash=sha256:d9e9913f7bd69e093b81da4535ce27af842e7bf371cde42d1ae9e9bd382dc0e9 \ - --hash=sha256:e2806553238cd076f0a55bddab37a532b53580e699ed8e5606d0de1f856b5205 \ - --hash=sha256:ebab974b1687509e5c973b5c4b8b146683e101e102e17a86bd196ecaa4d099fc \ - --hash=sha256:ed767bf4ba90104c1216b68111613f0d5926fb3780660ea1198fc469af410e9d \ - --hash=sha256:f7a1fc29803712f80879b0806cb83ab24ce62fc8daf0569f2204a0cfd7f68ed4 - # via - # google-cloud-storage - # google-resumable-media -google-resumable-media==2.7.2 \ - --hash=sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa \ - 
--hash=sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0 - # via google-cloud-storage -googleapis-common-protos==1.65.0 \ - --hash=sha256:2972e6c496f435b92590fd54045060867f3fe9be2c82ab148fc8885035479a63 \ - --hash=sha256:334a29d07cddc3aa01dee4988f9afd9b2916ee2ff49d6b757155dc0d197852c0 - # via google-api-core -idna==3.10 \ - --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ - --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 - # via requests -importlib-metadata==8.5.0 \ - --hash=sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b \ - --hash=sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7 - # via - # -r requirements.in - # keyring - # twine -jaraco-classes==3.4.0 \ - --hash=sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd \ - --hash=sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790 - # via keyring -jaraco-context==6.0.1 \ - --hash=sha256:9bae4ea555cf0b14938dc0aee7c9f32ed303aa20a3b73e7dc80111628792d1b3 \ - --hash=sha256:f797fc481b490edb305122c9181830a3a5b76d84ef6d1aef2fb9b47ab956f9e4 - # via keyring -jaraco-functools==4.1.0 \ - --hash=sha256:70f7e0e2ae076498e212562325e805204fc092d7b4c17e0e86c959e249701a9d \ - --hash=sha256:ad159f13428bc4acbf5541ad6dec511f91573b90fba04df61dafa2a1231cf649 - # via keyring -jeepney==0.8.0 \ - --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ - --hash=sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755 - # via - # keyring - # secretstorage -jinja2==3.1.5 \ - --hash=sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb \ - --hash=sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb - # via gcp-releasetool -keyring==25.4.1 \ - --hash=sha256:5426f817cf7f6f007ba5ec722b1bcad95a75b27d780343772ad76b17cb47b0bf \ - --hash=sha256:b07ebc55f3e8ed86ac81dd31ef14e81ace9dd9c3d4b5d77a6e9a2016d0d71a1b - 
# via - # gcp-releasetool - # twine -markdown-it-py==3.0.0 \ - --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \ - --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb - # via rich -markupsafe==3.0.1 \ - --hash=sha256:0778de17cff1acaeccc3ff30cd99a3fd5c50fc58ad3d6c0e0c4c58092b859396 \ - --hash=sha256:0f84af7e813784feb4d5e4ff7db633aba6c8ca64a833f61d8e4eade234ef0c38 \ - --hash=sha256:17b2aea42a7280db02ac644db1d634ad47dcc96faf38ab304fe26ba2680d359a \ - --hash=sha256:242d6860f1fd9191aef5fae22b51c5c19767f93fb9ead4d21924e0bcb17619d8 \ - --hash=sha256:244dbe463d5fb6d7ce161301a03a6fe744dac9072328ba9fc82289238582697b \ - --hash=sha256:26627785a54a947f6d7336ce5963569b5d75614619e75193bdb4e06e21d447ad \ - --hash=sha256:2a4b34a8d14649315c4bc26bbfa352663eb51d146e35eef231dd739d54a5430a \ - --hash=sha256:2ae99f31f47d849758a687102afdd05bd3d3ff7dbab0a8f1587981b58a76152a \ - --hash=sha256:312387403cd40699ab91d50735ea7a507b788091c416dd007eac54434aee51da \ - --hash=sha256:3341c043c37d78cc5ae6e3e305e988532b072329639007fd408a476642a89fd6 \ - --hash=sha256:33d1c36b90e570ba7785dacd1faaf091203d9942bc036118fab8110a401eb1a8 \ - --hash=sha256:3e683ee4f5d0fa2dde4db77ed8dd8a876686e3fc417655c2ece9a90576905344 \ - --hash=sha256:3ffb4a8e7d46ed96ae48805746755fadd0909fea2306f93d5d8233ba23dda12a \ - --hash=sha256:40621d60d0e58aa573b68ac5e2d6b20d44392878e0bfc159012a5787c4e35bc8 \ - --hash=sha256:40f1e10d51c92859765522cbd79c5c8989f40f0419614bcdc5015e7b6bf97fc5 \ - --hash=sha256:45d42d132cff577c92bfba536aefcfea7e26efb975bd455db4e6602f5c9f45e7 \ - --hash=sha256:48488d999ed50ba8d38c581d67e496f955821dc183883550a6fbc7f1aefdc170 \ - --hash=sha256:4935dd7883f1d50e2ffecca0aa33dc1946a94c8f3fdafb8df5c330e48f71b132 \ - --hash=sha256:4c2d64fdba74ad16138300815cfdc6ab2f4647e23ced81f59e940d7d4a1469d9 \ - --hash=sha256:4c8817557d0de9349109acb38b9dd570b03cc5014e8aabf1cbddc6e81005becd \ - --hash=sha256:4ffaaac913c3f7345579db4f33b0020db693f302ca5137f106060316761beea9 
\ - --hash=sha256:5a4cb365cb49b750bdb60b846b0c0bc49ed62e59a76635095a179d440540c346 \ - --hash=sha256:62fada2c942702ef8952754abfc1a9f7658a4d5460fabe95ac7ec2cbe0d02abc \ - --hash=sha256:67c519635a4f64e495c50e3107d9b4075aec33634272b5db1cde839e07367589 \ - --hash=sha256:6a54c43d3ec4cf2a39f4387ad044221c66a376e58c0d0e971d47c475ba79c6b5 \ - --hash=sha256:7044312a928a66a4c2a22644147bc61a199c1709712069a344a3fb5cfcf16915 \ - --hash=sha256:730d86af59e0e43ce277bb83970530dd223bf7f2a838e086b50affa6ec5f9295 \ - --hash=sha256:800100d45176652ded796134277ecb13640c1a537cad3b8b53da45aa96330453 \ - --hash=sha256:80fcbf3add8790caddfab6764bde258b5d09aefbe9169c183f88a7410f0f6dea \ - --hash=sha256:82b5dba6eb1bcc29cc305a18a3c5365d2af06ee71b123216416f7e20d2a84e5b \ - --hash=sha256:852dc840f6d7c985603e60b5deaae1d89c56cb038b577f6b5b8c808c97580f1d \ - --hash=sha256:8ad4ad1429cd4f315f32ef263c1342166695fad76c100c5d979c45d5570ed58b \ - --hash=sha256:8ae369e84466aa70f3154ee23c1451fda10a8ee1b63923ce76667e3077f2b0c4 \ - --hash=sha256:93e8248d650e7e9d49e8251f883eed60ecbc0e8ffd6349e18550925e31bd029b \ - --hash=sha256:973a371a55ce9ed333a3a0f8e0bcfae9e0d637711534bcb11e130af2ab9334e7 \ - --hash=sha256:9ba25a71ebf05b9bb0e2ae99f8bc08a07ee8e98c612175087112656ca0f5c8bf \ - --hash=sha256:a10860e00ded1dd0a65b83e717af28845bb7bd16d8ace40fe5531491de76b79f \ - --hash=sha256:a4792d3b3a6dfafefdf8e937f14906a51bd27025a36f4b188728a73382231d91 \ - --hash=sha256:a7420ceda262dbb4b8d839a4ec63d61c261e4e77677ed7c66c99f4e7cb5030dd \ - --hash=sha256:ad91738f14eb8da0ff82f2acd0098b6257621410dcbd4df20aaa5b4233d75a50 \ - --hash=sha256:b6a387d61fe41cdf7ea95b38e9af11cfb1a63499af2759444b99185c4ab33f5b \ - --hash=sha256:b954093679d5750495725ea6f88409946d69cfb25ea7b4c846eef5044194f583 \ - --hash=sha256:bbde71a705f8e9e4c3e9e33db69341d040c827c7afa6789b14c6e16776074f5a \ - --hash=sha256:beeebf760a9c1f4c07ef6a53465e8cfa776ea6a2021eda0d0417ec41043fe984 \ - --hash=sha256:c91b394f7601438ff79a4b93d16be92f216adb57d813a78be4446fe0f6bc2d8c \ - 
--hash=sha256:c97ff7fedf56d86bae92fa0a646ce1a0ec7509a7578e1ed238731ba13aabcd1c \ - --hash=sha256:cb53e2a99df28eee3b5f4fea166020d3ef9116fdc5764bc5117486e6d1211b25 \ - --hash=sha256:cbf445eb5628981a80f54087f9acdbf84f9b7d862756110d172993b9a5ae81aa \ - --hash=sha256:d06b24c686a34c86c8c1fba923181eae6b10565e4d80bdd7bc1c8e2f11247aa4 \ - --hash=sha256:d98e66a24497637dd31ccab090b34392dddb1f2f811c4b4cd80c230205c074a3 \ - --hash=sha256:db15ce28e1e127a0013dfb8ac243a8e392db8c61eae113337536edb28bdc1f97 \ - --hash=sha256:db842712984e91707437461930e6011e60b39136c7331e971952bb30465bc1a1 \ - --hash=sha256:e24bfe89c6ac4c31792793ad9f861b8f6dc4546ac6dc8f1c9083c7c4f2b335cd \ - --hash=sha256:e81c52638315ff4ac1b533d427f50bc0afc746deb949210bc85f05d4f15fd772 \ - --hash=sha256:e9393357f19954248b00bed7c56f29a25c930593a77630c719653d51e7669c2a \ - --hash=sha256:ee3941769bd2522fe39222206f6dd97ae83c442a94c90f2b7a25d847d40f4729 \ - --hash=sha256:f31ae06f1328595d762c9a2bf29dafd8621c7d3adc130cbb46278079758779ca \ - --hash=sha256:f94190df587738280d544971500b9cafc9b950d32efcb1fba9ac10d84e6aa4e6 \ - --hash=sha256:fa7d686ed9883f3d664d39d5a8e74d3c5f63e603c2e3ff0abcba23eac6542635 \ - --hash=sha256:fb532dd9900381d2e8f48172ddc5a59db4c445a11b9fab40b3b786da40d3b56b \ - --hash=sha256:fe32482b37b4b00c7a52a07211b479653b7fe4f22b2e481b9a9b099d8a430f2f - # via jinja2 -mdurl==0.1.2 \ - --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ - --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba - # via markdown-it-py -more-itertools==10.5.0 \ - --hash=sha256:037b0d3203ce90cca8ab1defbbdac29d5f993fc20131f3664dc8d6acfa872aef \ - --hash=sha256:5482bfef7849c25dc3c6dd53a6173ae4795da2a41a80faea6700d9f5846c5da6 - # via - # jaraco-classes - # jaraco-functools -nh3==0.2.18 \ - --hash=sha256:0411beb0589eacb6734f28d5497ca2ed379eafab8ad8c84b31bb5c34072b7164 \ - --hash=sha256:14c5a72e9fe82aea5fe3072116ad4661af5cf8e8ff8fc5ad3450f123e4925e86 \ - 
--hash=sha256:19aaba96e0f795bd0a6c56291495ff59364f4300d4a39b29a0abc9cb3774a84b \ - --hash=sha256:34c03fa78e328c691f982b7c03d4423bdfd7da69cd707fe572f544cf74ac23ad \ - --hash=sha256:36c95d4b70530b320b365659bb5034341316e6a9b30f0b25fa9c9eff4c27a204 \ - --hash=sha256:3a157ab149e591bb638a55c8c6bcb8cdb559c8b12c13a8affaba6cedfe51713a \ - --hash=sha256:42c64511469005058cd17cc1537578eac40ae9f7200bedcfd1fc1a05f4f8c200 \ - --hash=sha256:5f36b271dae35c465ef5e9090e1fdaba4a60a56f0bb0ba03e0932a66f28b9189 \ - --hash=sha256:6955369e4d9f48f41e3f238a9e60f9410645db7e07435e62c6a9ea6135a4907f \ - --hash=sha256:7b7c2a3c9eb1a827d42539aa64091640bd275b81e097cd1d8d82ef91ffa2e811 \ - --hash=sha256:8ce0f819d2f1933953fca255db2471ad58184a60508f03e6285e5114b6254844 \ - --hash=sha256:94a166927e53972a9698af9542ace4e38b9de50c34352b962f4d9a7d4c927af4 \ - --hash=sha256:a7f1b5b2c15866f2db413a3649a8fe4fd7b428ae58be2c0f6bca5eefd53ca2be \ - --hash=sha256:c8b3a1cebcba9b3669ed1a84cc65bf005728d2f0bc1ed2a6594a992e817f3a50 \ - --hash=sha256:de3ceed6e661954871d6cd78b410213bdcb136f79aafe22aa7182e028b8c7307 \ - --hash=sha256:f0eca9ca8628dbb4e916ae2491d72957fdd35f7a5d326b7032a345f111ac07fe - # via readme-renderer -nox==2024.10.9 \ - --hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \ - --hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95 - # via -r requirements.in -packaging==24.1 \ - --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ - --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 - # via - # gcp-releasetool - # nox -pkginfo==1.10.0 \ - --hash=sha256:5df73835398d10db79f8eecd5cd86b1f6d29317589ea70796994d49399af6297 \ - --hash=sha256:889a6da2ed7ffc58ab5b900d888ddce90bce912f2d2de1dc1c26f4cb9fe65097 - # via twine -platformdirs==4.3.6 \ - --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ - --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb - # 
via virtualenv -proto-plus==1.24.0 \ - --hash=sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445 \ - --hash=sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12 - # via google-api-core -protobuf==5.28.2 \ - --hash=sha256:2c69461a7fcc8e24be697624c09a839976d82ae75062b11a0972e41fd2cd9132 \ - --hash=sha256:35cfcb15f213449af7ff6198d6eb5f739c37d7e4f1c09b5d0641babf2cc0c68f \ - --hash=sha256:52235802093bd8a2811abbe8bf0ab9c5f54cca0a751fdd3f6ac2a21438bffece \ - --hash=sha256:59379674ff119717404f7454647913787034f03fe7049cbef1d74a97bb4593f0 \ - --hash=sha256:5e8a95246d581eef20471b5d5ba010d55f66740942b95ba9b872d918c459452f \ - --hash=sha256:87317e9bcda04a32f2ee82089a204d3a2f0d3c8aeed16568c7daf4756e4f1fe0 \ - --hash=sha256:8ddc60bf374785fb7cb12510b267f59067fa10087325b8e1855b898a0d81d276 \ - --hash=sha256:a8b9403fc70764b08d2f593ce44f1d2920c5077bf7d311fefec999f8c40f78b7 \ - --hash=sha256:c0ea0123dac3399a2eeb1a1443d82b7afc9ff40241433296769f7da42d142ec3 \ - --hash=sha256:ca53faf29896c526863366a52a8f4d88e69cd04ec9571ed6082fa117fac3ab36 \ - --hash=sha256:eeea10f3dc0ac7e6b4933d32db20662902b4ab81bf28df12218aa389e9c2102d - # via - # gcp-docuploader - # gcp-releasetool - # google-api-core - # googleapis-common-protos - # proto-plus -pyasn1==0.6.1 \ - --hash=sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629 \ - --hash=sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034 - # via - # pyasn1-modules - # rsa -pyasn1-modules==0.4.1 \ - --hash=sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd \ - --hash=sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c - # via google-auth -pycparser==2.22 \ - --hash=sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6 \ - --hash=sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc - # via cffi -pygments==2.18.0 \ - --hash=sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199 \ - 
--hash=sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a - # via - # readme-renderer - # rich -pyjwt==2.9.0 \ - --hash=sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850 \ - --hash=sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c - # via gcp-releasetool -pyperclip==1.9.0 \ - --hash=sha256:b7de0142ddc81bfc5c7507eea19da920b92252b548b96186caf94a5e2527d310 - # via gcp-releasetool -python-dateutil==2.9.0.post0 \ - --hash=sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3 \ - --hash=sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427 - # via gcp-releasetool -readme-renderer==44.0 \ - --hash=sha256:2fbca89b81a08526aadf1357a8c2ae889ec05fb03f5da67f9769c9a592166151 \ - --hash=sha256:8712034eabbfa6805cacf1402b4eeb2a73028f72d1166d6f5cb7f9c047c5d1e1 - # via twine -requests==2.32.3 \ - --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ - --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 - # via - # gcp-releasetool - # google-api-core - # google-cloud-storage - # requests-toolbelt - # twine -requests-toolbelt==1.0.0 \ - --hash=sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6 \ - --hash=sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06 - # via twine -rfc3986==2.0.0 \ - --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ - --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c - # via twine -rich==13.9.2 \ - --hash=sha256:51a2c62057461aaf7152b4d611168f93a9fc73068f8ded2790f29fe2b5366d0c \ - --hash=sha256:8c82a3d3f8dcfe9e734771313e606b39d8247bb6b826e196f4914b333b743cf1 - # via twine -rsa==4.9 \ - --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ - --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 - # via google-auth -secretstorage==3.3.3 \ - 
--hash=sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77 \ - --hash=sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99 - # via keyring -six==1.16.0 \ - --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ - --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 - # via - # gcp-docuploader - # python-dateutil -tomli==2.0.2 \ - --hash=sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38 \ - --hash=sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed - # via nox -twine==5.1.1 \ - --hash=sha256:215dbe7b4b94c2c50a7315c0275d2258399280fbb7d04182c7e55e24b5f93997 \ - --hash=sha256:9aa0825139c02b3434d913545c7b847a21c835e11597f5255842d457da2322db - # via -r requirements.in -typing-extensions==4.12.2 \ - --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ - --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 - # via - # -r requirements.in - # rich -urllib3==2.2.3 \ - --hash=sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac \ - --hash=sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9 - # via - # requests - # twine -virtualenv==20.26.6 \ - --hash=sha256:280aede09a2a5c317e409a00102e7077c6432c5a38f0ef938e643805a7ad2c48 \ - --hash=sha256:7345cc5b25405607a624d8418154577459c3e0277f5466dd79c49d5e492995f2 - # via nox -wheel==0.44.0 \ - --hash=sha256:2376a90c98cc337d18623527a97c31797bd02bad0033d41547043a1cbfbe448f \ - --hash=sha256:a29c3f2817e95ab89aa4660681ad547c0e9547f20e75b0562fe7723c9a2a9d49 - # via -r requirements.in -zipp==3.20.2 \ - --hash=sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350 \ - --hash=sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29 - # via importlib-metadata - -# The following packages are considered to be unsafe in a requirements file: -setuptools==75.1.0 \ - 
--hash=sha256:35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2 \ - --hash=sha256:d59a21b17a275fb872a9c3dae73963160ae079f1049ed956880cd7c09b120538 - # via -r requirements.in From 3a67384845cbce89aef07d9748da164d748f849d Mon Sep 17 00:00:00 2001 From: Chalmer Lowe Date: Mon, 10 Mar 2025 11:40:58 -0400 Subject: [PATCH 191/210] fix: Updates noxfile with pip freeze and pandas 1.2 (#331) --- packages/db-dtypes/noxfile.py | 13 +++++++++++++ packages/db-dtypes/setup.py | 2 +- packages/db-dtypes/testing/constraints-3.7.txt | 3 +-- 3 files changed, 15 insertions(+), 3 deletions(-) diff --git a/packages/db-dtypes/noxfile.py b/packages/db-dtypes/noxfile.py index c487cd79a6ec..60df10990545 100644 --- a/packages/db-dtypes/noxfile.py +++ b/packages/db-dtypes/noxfile.py @@ -88,6 +88,7 @@ def lint(session): serious code quality issues. """ session.install(FLAKE8_VERSION, BLACK_VERSION) + session.run("python", "-m", "pip", "freeze") session.run( "black", "--check", @@ -100,6 +101,7 @@ def lint(session): def blacken(session): """Run black. Format code to uniform standard.""" session.install(BLACK_VERSION) + session.run("python", "-m", "pip", "freeze") session.run( "black", *LINT_PATHS, @@ -115,6 +117,7 @@ def format(session): session.install(BLACK_VERSION, ISORT_VERSION) # Use the --fss option to sort imports using strict alphabetical order. 
# See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run("python", "-m", "pip", "freeze") session.run( "isort", "--fss", @@ -130,6 +133,7 @@ def format(session): def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" session.install("docutils", "pygments") + session.run("python", "-m", "pip", "freeze") session.run("python", "setup.py", "check", "--restructuredtext", "--strict") @@ -169,6 +173,8 @@ def default(session, tests_path): ) install_unittest_dependencies(session, "-c", constraints_path) + session.run("python", "-m", "pip", "freeze") + # Run py.test against the unit tests. session.run( "py.test", @@ -342,6 +348,8 @@ def system(session): install_systemtest_dependencies(session, "-c", constraints_path) + session.run("python", "-m", "pip", "freeze") + # Run py.test against the system tests. if system_test_exists: session.run( @@ -371,6 +379,8 @@ def cover(session): test runs (not system test runs), and then erases coverage data. 
""" session.install("coverage", "pytest-cov") + session.run("python", "-m", "pip", "freeze") + session.run("coverage", "report", "--show-missing", "--fail-under=100") session.run("coverage", "erase") @@ -397,6 +407,7 @@ def docs(session): ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run("python", "-m", "pip", "freeze") session.run( "sphinx-build", "-W", # warnings as errors @@ -432,6 +443,7 @@ def docfx(session): ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run("python", "-m", "pip", "freeze") session.run( "sphinx-build", "-T", # show full traceback on exception @@ -515,6 +527,7 @@ def prerelease_deps(session): "requests", ] session.install(*other_deps) + session.run("python", "-m", "pip", "freeze") # Print out prerelease package versions session.run( diff --git a/packages/db-dtypes/setup.py b/packages/db-dtypes/setup.py index 276a9401081f..4076eaa450a8 100644 --- a/packages/db-dtypes/setup.py +++ b/packages/db-dtypes/setup.py @@ -31,7 +31,7 @@ dependencies = [ "packaging >= 17.0", - "pandas >= 0.24.2", + "pandas >= 1.2.0", "pyarrow>=3.0.0", "numpy >= 1.16.6", ] diff --git a/packages/db-dtypes/testing/constraints-3.7.txt b/packages/db-dtypes/testing/constraints-3.7.txt index a7388cdc2344..a5c7a032e256 100644 --- a/packages/db-dtypes/testing/constraints-3.7.txt +++ b/packages/db-dtypes/testing/constraints-3.7.txt @@ -5,7 +5,6 @@ # # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", packaging==17.0 -# Make sure we test with pandas 0.24.2. The Python version isn't that relevant. -pandas==0.24.2 +pandas==1.2.0 pyarrow==3.0.0 numpy==1.16.6 From 8582bf642287b95b4c650f8a6a4a8606ef83b22a Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 14 Mar 2025 09:30:23 -0400 Subject: [PATCH 192/210] fix: remove setup.cfg configuration for creating universal wheels (#333) `setup.cfg` contains a setting to create a `Universal Wheel` which is only needed if libraries support both Python 2 and Python 3. 
This library only supports Python 3 so this setting is no longer needed. See https://packaging.python.org/en/latest/guides/distributing-packages-using-setuptools/#wheels. See similar PR https://togithub.com/googleapis/google-cloud-python/pull/13659 which includes this stack trace ``` running bdist_wheel /tmp/pip-build-env-9o_3w17v/overlay/lib/python3.13/site-packages/setuptools/_distutils/cmd.py:135: SetuptoolsDeprecationWarning: bdist_wheel.universal is deprecated !! ******************************************************************************** With Python 2.7 end-of-life, support for building universal wheels (i.e., wheels that support both Python 2 and Python 3) is being obviated. Please discontinue using this option, or if you still need it, file an issue with pypa/setuptools describing your use case. By 2025-Aug-30, you need to update your project and remove deprecated calls or your builds will no longer be supported. ******************************************************************************** !! ``` --- packages/db-dtypes/setup.cfg | 19 ------------------- 1 file changed, 19 deletions(-) delete mode 100644 packages/db-dtypes/setup.cfg diff --git a/packages/db-dtypes/setup.cfg b/packages/db-dtypes/setup.cfg deleted file mode 100644 index 052350089505..000000000000 --- a/packages/db-dtypes/setup.cfg +++ /dev/null @@ -1,19 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# Generated by synthtool. DO NOT EDIT! -[bdist_wheel] -universal = 1 From 45c849c126fac218526cf033c41a6c51c9bfd024 Mon Sep 17 00:00:00 2001 From: Chalmer Lowe Date: Thu, 17 Apr 2025 11:08:00 -0400 Subject: [PATCH 193/210] chore: Update Python version to 3.13 in compliance workflow (#335) * Update Python version to 3.13 in compliance workflow Updates the Python version used in the matrix strategy for both the `compliance` and `compliance-prerelease` jobs in the `.github/workflows/compliance.yml` file from 3.12 to 3.13. * Update .github/workflows/compliance.yml * Update .github/workflows/compliance.yml --------- Co-authored-by: google-labs-jules[bot] <161369871+google-labs-jules[bot]@users.noreply.github.com> --- packages/db-dtypes/.github/workflows/compliance.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/db-dtypes/.github/workflows/compliance.yml b/packages/db-dtypes/.github/workflows/compliance.yml index 90a7c8394de3..03695095940b 100644 --- a/packages/db-dtypes/.github/workflows/compliance.yml +++ b/packages/db-dtypes/.github/workflows/compliance.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ['3.12'] + python: ['3.13'] steps: - name: Checkout uses: actions/checkout@v4 @@ -29,7 +29,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ['3.12'] + python: ['3.13'] steps: - name: Checkout uses: actions/checkout@v4 From f1faacb5dfbf70ff9b33031322ab0413a3f8ffe0 Mon Sep 17 00:00:00 2001 From: Chalmer Lowe Date: Thu, 17 Apr 2025 14:20:30 -0400 Subject: [PATCH 194/210] chore: Add Python 3.13 updates for repo settings, noxfile, setup, constraints, .github (AI experiment) (#336) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: Add Python 3.13 updates for repo settings and Kokoro Includes additional updates for Python 3.13 support: - Adds 'unit (3.13)' to the required status checks in the repo settings file (`.github/sync-repo-settings.yaml`). 
- Populates and updates Kokoro sample configuration files in `.kokoro/samples/python3.13/` to mirror the 3.12 setup, adjusting version-specific variables. * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: google-labs-jules[bot] <161369871+google-labs-jules[bot]@users.noreply.github.com> Co-authored-by: Owl Bot --- packages/db-dtypes/.github/sync-repo-settings.yaml | 1 + packages/db-dtypes/.github/workflows/unittest.yml | 6 +++--- packages/db-dtypes/noxfile.py | 10 +++++++++- packages/db-dtypes/setup.py | 1 + packages/db-dtypes/testing/constraints-3.13.txt | 0 5 files changed, 14 insertions(+), 4 deletions(-) create mode 100644 packages/db-dtypes/testing/constraints-3.13.txt diff --git a/packages/db-dtypes/.github/sync-repo-settings.yaml b/packages/db-dtypes/.github/sync-repo-settings.yaml index 92e3e7692b05..bae6e96413b6 100644 --- a/packages/db-dtypes/.github/sync-repo-settings.yaml +++ b/packages/db-dtypes/.github/sync-repo-settings.yaml @@ -16,6 +16,7 @@ branchProtectionRules: - 'unit (3.10)' - 'unit (3.11)' - 'unit (3.12)' + - 'unit (3.13)' - 'cover' permissionRules: - team: actools-python diff --git a/packages/db-dtypes/.github/workflows/unittest.yml b/packages/db-dtypes/.github/workflows/unittest.yml index d919b44ca951..699045cf1c81 100644 --- a/packages/db-dtypes/.github/workflows/unittest.yml +++ b/packages/db-dtypes/.github/workflows/unittest.yml @@ -11,7 +11,7 @@ jobs: runs-on: ubuntu-22.04 strategy: matrix: - python: ['3.8', '3.9', '3.10', '3.11', '3.12'] + python: ['3.8', '3.9', '3.10', '3.11', '3.12', '3.13'] steps: - name: Checkout uses: actions/checkout@v4 @@ -39,7 +39,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ['3.12'] + python: ['3.13'] steps: - name: Checkout uses: actions/checkout@v4 @@ -67,7 +67,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ['3.12'] + python: ['3.13'] steps: - name: Checkout uses: 
actions/checkout@v4 diff --git a/packages/db-dtypes/noxfile.py b/packages/db-dtypes/noxfile.py index 60df10990545..363fc2eb60b5 100644 --- a/packages/db-dtypes/noxfile.py +++ b/packages/db-dtypes/noxfile.py @@ -34,7 +34,15 @@ DEFAULT_PYTHON_VERSION = "3.8" -UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] +UNIT_TEST_PYTHON_VERSIONS: List[str] = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", + "3.12", + "3.13", +] UNIT_TEST_STANDARD_DEPENDENCIES = [ "mock", "asyncmock", diff --git a/packages/db-dtypes/setup.py b/packages/db-dtypes/setup.py index 4076eaa450a8..98bed9d4b3cd 100644 --- a/packages/db-dtypes/setup.py +++ b/packages/db-dtypes/setup.py @@ -69,6 +69,7 @@ def readme(): "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", "Operating System :: OS Independent", "Topic :: Database :: Front-Ends", ], diff --git a/packages/db-dtypes/testing/constraints-3.13.txt b/packages/db-dtypes/testing/constraints-3.13.txt new file mode 100644 index 000000000000..e69de29bb2d1 From 86c611d36872e779835ca5eb849f32bafe419a33 Mon Sep 17 00:00:00 2001 From: Chalmer Lowe Date: Thu, 24 Apr 2025 15:56:31 -0400 Subject: [PATCH 195/210] deps: Updates deprecation warning to FutureWarning re: 3.7 and 3.8 (#338) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * deps: Updates deprecation warning to FutureWarning re: 3.7 and 3.8 * Changes pytest flag to ignore FutureWarning re: 3.7 and 3.8 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Changes pytest flag to ignore FutureWarning re: 3.7 and 3.8 --------- Co-authored-by: Owl Bot --- packages/db-dtypes/db_dtypes/__init__.py | 26 +++--- packages/db-dtypes/noxfile.py | 4 +- packages/db-dtypes/tests/unit/test__init__.py | 85 +++++++++++++++++++ 3 files changed, 103 
insertions(+), 12 deletions(-) create mode 100644 packages/db-dtypes/tests/unit/test__init__.py diff --git a/packages/db-dtypes/db_dtypes/__init__.py b/packages/db-dtypes/db_dtypes/__init__.py index 2424ff43c636..6656671d72f2 100644 --- a/packages/db-dtypes/db_dtypes/__init__.py +++ b/packages/db-dtypes/db_dtypes/__init__.py @@ -347,16 +347,22 @@ def __sub__(self, other): return super().__sub__(other) -sys_major, sys_minor, sys_micro = _versions_helpers.extract_runtime_version() -if sys_major == 3 and sys_minor in (7, 8): - warnings.warn( - "The python-bigquery library will stop supporting Python 3.7 " - "and Python 3.8 in a future major release expected in Q4 2024. " - f"Your Python version is {sys_major}.{sys_minor}.{sys_micro}. We " - "recommend that you update soon to ensure ongoing support. For " - "more details, see: [Google Cloud Client Libraries Supported Python Versions policy](https://cloud.google.com/python/docs/supported-python-versions)", - PendingDeprecationWarning, - ) +def _check_python_version(): + """Checks the runtime Python version and issues a warning if needed.""" + sys_major, sys_minor, sys_micro = _versions_helpers.extract_runtime_version() + if sys_major == 3 and sys_minor in (7, 8): + warnings.warn( + "The python-bigquery library as well as the python-db-dtypes-pandas library no " + "longer supports Python 3.7 and Python 3.8. " + f"Your Python version is {sys_major}.{sys_minor}.{sys_micro}. We " + "recommend that you update soon to ensure ongoing support. 
For " + "more details, see: [Google Cloud Client Libraries Supported Python Versions policy](https://cloud.google.com/python/docs/supported-python-versions)", + FutureWarning, + stacklevel=2, # Point warning to the caller of __init__ + ) + + +_check_python_version() if not JSONArray or not JSONDtype: diff --git a/packages/db-dtypes/noxfile.py b/packages/db-dtypes/noxfile.py index 363fc2eb60b5..b3c9450ff585 100644 --- a/packages/db-dtypes/noxfile.py +++ b/packages/db-dtypes/noxfile.py @@ -187,7 +187,7 @@ def default(session, tests_path): session.run( "py.test", "--quiet", - "-W default::PendingDeprecationWarning", + "-W default::FutureWarning", f"--junitxml={os.path.split(tests_path)[-1]}_{session.python}_sponge_log.xml", "--cov=db_dtypes", "--cov=tests/unit", @@ -265,7 +265,7 @@ def prerelease(session, tests_path): session.run( "py.test", "--quiet", - "-W default::PendingDeprecationWarning", + "-W default::FutureWarning", f"--junitxml={os.path.split(tests_path)[-1]}_prerelease_{session.python}_sponge_log.xml", "--cov=db_dtypes", "--cov=tests/unit", diff --git a/packages/db-dtypes/tests/unit/test__init__.py b/packages/db-dtypes/tests/unit/test__init__.py new file mode 100644 index 000000000000..4b86d5458315 --- /dev/null +++ b/packages/db-dtypes/tests/unit/test__init__.py @@ -0,0 +1,85 @@ +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from unittest import mock + +import pytest + +# Module paths used for mocking +MODULE_PATH = "db_dtypes" +HELPER_MODULE_PATH = f"{MODULE_PATH}._versions_helpers" +MOCK_EXTRACT_VERSION = f"{HELPER_MODULE_PATH}.extract_runtime_version" +MOCK_WARN = "warnings.warn" # Target the standard warnings module + + +@pytest.mark.parametrize( + "mock_version_tuple, version_str", + [ + ((3, 7, 10), "3.7.10"), + ((3, 7, 0), "3.7.0"), + ((3, 8, 5), "3.8.5"), + ((3, 8, 12), "3.8.12"), + ], +) +def test_check_python_version_warns_on_unsupported(mock_version_tuple, version_str): + """ + Test that _check_python_version issues a FutureWarning for Python 3.7/3.8. + """ + + from db_dtypes import _check_python_version + + # Mock the helper function it calls and the warnings.warn function + with mock.patch(MOCK_EXTRACT_VERSION, return_value=mock_version_tuple), mock.patch( + MOCK_WARN + ) as mock_warn_call: + _check_python_version() # Call the function + + # Assert that warnings.warn was called exactly once + mock_warn_call.assert_called_once() + + # Check the arguments passed to warnings.warn + args, kwargs = mock_warn_call.call_args + assert len(args) >= 1 # Should have at least the message + warning_message = args[0] + warning_category = args[1] if len(args) > 1 else kwargs.get("category") + + # Verify message content and category + assert "longer supports Python 3.7 and Python 3.8" in warning_message + assert warning_category == FutureWarning + + +@pytest.mark.parametrize( + "mock_version_tuple", + [ + (3, 9, 1), + (3, 10, 0), + (3, 11, 2), + (3, 12, 0), + ], +) +def test_check_python_version_does_not_warn_on_supported(mock_version_tuple): + """ + Test that _check_python_version does NOT issue a warning for other versions. 
+ """ + + from db_dtypes import _check_python_version + + # Mock the helper function it calls and the warnings.warn function + with mock.patch(MOCK_EXTRACT_VERSION, return_value=mock_version_tuple), mock.patch( + MOCK_WARN + ) as mock_warn_call: + _check_python_version() + + # Assert that warnings.warn was NOT called + mock_warn_call.assert_not_called() From 08f464bea55e4da493ea8eecc79f3c532503afa4 Mon Sep 17 00:00:00 2001 From: Chalmer Lowe Date: Wed, 7 May 2025 15:57:24 -0400 Subject: [PATCH 196/210] deps!: Drop support for Python 3.7 and 3.8 (AI Experiment) (#337) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: Drop support for Python 3.7 and 3.8 Removes support for Python 3.7 and 3.8, establishing Python 3.9 as the new minimum supported version. This change involves: - Updating `python_requires` and classifiers in `setup.py`. - Modifying Python versions in `noxfile.py` (default, unit tests, system tests) and ensuring constraint file logic remains correct. - Updating the GitHub Actions workflow (`unittest.yml`) matrix, runner, and coverage job version. - Deleting constraint files for Python 3.7 and 3.8 (`testing/constraints-3.7.txt`, `testing/constraints-3.8.txt`). - Removing Kokoro sample configuration directories (`.kokoro/samples/python3.7/`, `.kokoro/samples/python3.8/`). - Updating supported version mentions in `README.rst`. - Removing 3.7 and 3.8 from the `ALL_VERSIONS` list in `samples/snippets/noxfile.py`. 
* Updates python version in lint.yml * Updates owlbot, removing reference to 3.8 * Updates CONTRIBUTING.rst * updates pytest warnings * Removes test_samples-impl ref to older virtualenv package * Removes references to pandas older than 1.5.0 * Removes pandas older than 1.5 and misc changes * updates pandas in setup.py * more updates related to pandas * still broken * Updates FutureWarning tests to account for unittest coverage * Updates json array type tests to account for unittest coverage * updates python version checks to ensure coverage * update json test for unittest coverage * Update pandas_backports unittests to ensure coverage * Updates per review comments * moves class from version specific compliance file to generic file * Removes weird cut and paste error * fix linting errors * updates import statement to ensure import of JSONArrowType * Revise required github status checks * update linting * temporarily marking a class as no cover * more updates * marked several snippets as pragma no cover * updates linting * Updates constraints and setup.py * migrates class from one time compliance file to another * updating pyarrow version * Updates linting * removes determine all and module reload tests * updates re: ndarrybackedextensionarray * testing blacken as part of owlbot processing using 3.8 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * updates blacken to 3.10 * update python version in lint.yml * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * updates owlbot.py * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * updates owlbot.py * testing lint.yml * testing linting issue * testing linting issue * 🦉 Updates from OwlBot post-processor See 
https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * updates lint_setup session * Update noxfile.py --------- Co-authored-by: google-labs-jules[bot] <161369871+google-labs-jules[bot]@users.noreply.github.com> Co-authored-by: Owl Bot --- .../db-dtypes/.github/sync-repo-settings.yaml | 1 - .../db-dtypes/.github/workflows/unittest.yml | 10 +- packages/db-dtypes/CONTRIBUTING.rst | 4 +- packages/db-dtypes/README.rst | 4 +- packages/db-dtypes/db_dtypes/__init__.py | 42 +++------ packages/db-dtypes/db_dtypes/core.py | 8 +- .../db-dtypes/db_dtypes/pandas_backports.py | 92 +------------------ packages/db-dtypes/db_dtypes/version.py | 2 +- packages/db-dtypes/noxfile.py | 22 +++-- packages/db-dtypes/owlbot.py | 2 +- packages/db-dtypes/pytest.ini | 7 -- .../samples/snippets/requirements-test.txt | 3 +- packages/db-dtypes/setup.py | 12 +-- .../db-dtypes/testing/constraints-3.7.txt | 10 -- .../db-dtypes/testing/constraints-3.8.txt | 2 - .../db-dtypes/testing/constraints-3.9.txt | 4 +- .../compliance/date/test_date_compliance.py | 6 ++ .../date/test_date_compliance_1_5.py | 31 ------- .../compliance/time/test_time_compliance.py | 5 + .../time/test_time_compliance_1_5.py | 31 ------- packages/db-dtypes/tests/unit/test_dtypes.py | 41 ++++----- packages/db-dtypes/tests/unit/test_json.py | 2 + .../tests/unit/test_pandas_backports.py | 16 ++++ 23 files changed, 97 insertions(+), 260 deletions(-) delete mode 100644 packages/db-dtypes/testing/constraints-3.7.txt delete mode 100644 packages/db-dtypes/testing/constraints-3.8.txt delete mode 100644 packages/db-dtypes/tests/compliance/date/test_date_compliance_1_5.py delete mode 100644 packages/db-dtypes/tests/compliance/time/test_time_compliance_1_5.py diff --git a/packages/db-dtypes/.github/sync-repo-settings.yaml b/packages/db-dtypes/.github/sync-repo-settings.yaml index bae6e96413b6..55cd410a768a 100644 --- a/packages/db-dtypes/.github/sync-repo-settings.yaml +++ 
b/packages/db-dtypes/.github/sync-repo-settings.yaml @@ -11,7 +11,6 @@ branchProtectionRules: - 'cla/google' - 'docs' - 'lint' - - 'unit (3.8)' - 'unit (3.9)' - 'unit (3.10)' - 'unit (3.11)' diff --git a/packages/db-dtypes/.github/workflows/unittest.yml b/packages/db-dtypes/.github/workflows/unittest.yml index 699045cf1c81..41694c386562 100644 --- a/packages/db-dtypes/.github/workflows/unittest.yml +++ b/packages/db-dtypes/.github/workflows/unittest.yml @@ -5,13 +5,11 @@ on: name: unittest jobs: unit: - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2303): use `ubuntu-latest` once this bug is fixed. - # Use ubuntu-22.04 until Python 3.7 is removed from the test matrix - # https://docs.github.com/en/actions/using-github-hosted-runners/using-github-hosted-runners/about-github-hosted-runners#standard-github-hosted-runners-for-public-repositories - runs-on: ubuntu-22.04 + # Use `ubuntu-latest` runner. + runs-on: ubuntu-latest strategy: matrix: - python: ['3.8', '3.9', '3.10', '3.11', '3.12', '3.13'] + python: ['3.9', '3.10', '3.11', '3.12', '3.13'] steps: - name: Checkout uses: actions/checkout@v4 @@ -103,7 +101,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v5 with: - python-version: "3.8" + python-version: "3.9" - name: Install coverage run: | python -m pip install --upgrade setuptools pip wheel diff --git a/packages/db-dtypes/CONTRIBUTING.rst b/packages/db-dtypes/CONTRIBUTING.rst index 0bda74ace1e4..c333038d8c05 100644 --- a/packages/db-dtypes/CONTRIBUTING.rst +++ b/packages/db-dtypes/CONTRIBUTING.rst @@ -143,12 +143,12 @@ Running System Tests $ nox -s system # Run a single system test - $ nox -s system-3.8 -- -k + $ nox -s system-3.9 -- -k .. note:: - System tests are only configured to run under Python 3.8. + System tests are only configured to run under Python 3.9. For expediency, we do not run them in older versions of Python 3. This alone will not run the tests. 
You'll need to change some local diff --git a/packages/db-dtypes/README.rst b/packages/db-dtypes/README.rst index abf1e8741259..eab2705e2487 100644 --- a/packages/db-dtypes/README.rst +++ b/packages/db-dtypes/README.rst @@ -34,11 +34,11 @@ dependencies. Supported Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^ -Python >= 3.7 +Python >= 3.9 Unsupported Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Python <= 3.6. +Python <= 3.8. Mac/Linux diff --git a/packages/db-dtypes/db_dtypes/__init__.py b/packages/db-dtypes/db_dtypes/__init__.py index 6656671d72f2..c2b23daf4fca 100644 --- a/packages/db-dtypes/db_dtypes/__init__.py +++ b/packages/db-dtypes/db_dtypes/__init__.py @@ -21,7 +21,6 @@ import warnings import numpy -import packaging.version import pandas import pandas.api.extensions from pandas.errors import OutOfBoundsDatetime @@ -29,7 +28,7 @@ import pyarrow.compute from db_dtypes import core -from db_dtypes.version import __version__ +from db_dtypes.json import JSONArray, JSONDtype, JSONArrowType # noqa: F401 from . import _versions_helpers @@ -47,15 +46,6 @@ _NP_BOX_DTYPE = "datetime64[us]" -# To use JSONArray and JSONDtype, you'll need Pandas 1.5.0 or later. With the removal -# of Python 3.7 compatibility, the minimum Pandas version will be updated to 1.5.0. 
-if packaging.version.Version(pandas.__version__) >= packaging.version.Version("1.5.0"): - from db_dtypes.json import JSONArray, JSONArrowType, JSONDtype -else: - JSONArray = None - JSONDtype = None - - @pandas.api.extensions.register_extension_dtype class TimeDtype(core.BaseDatetimeDtype): """ @@ -364,23 +354,13 @@ def _check_python_version(): _check_python_version() - -if not JSONArray or not JSONDtype: - __all__ = [ - "__version__", - "DateArray", - "DateDtype", - "TimeArray", - "TimeDtype", - ] -else: - __all__ = [ - "__version__", - "DateArray", - "DateDtype", - "JSONDtype", - "JSONArray", - "JSONArrowType", - "TimeArray", - "TimeDtype", - ] +__all__ = [ + "__version__", + "DateArray", + "DateDtype", + "TimeArray", + "TimeDtype", + "JSONDtype", + "JSONArray", + "JSONArrowType", +] diff --git a/packages/db-dtypes/db_dtypes/core.py b/packages/db-dtypes/db_dtypes/core.py index 7c9eb6b9a834..926a11094018 100644 --- a/packages/db-dtypes/db_dtypes/core.py +++ b/packages/db-dtypes/db_dtypes/core.py @@ -18,6 +18,7 @@ import pandas import pandas.api.extensions from pandas.api.types import is_dtype_equal, is_list_like, is_scalar, pandas_dtype +from pandas.core.arrays import _mixins from db_dtypes import pandas_backports @@ -42,9 +43,7 @@ def construct_from_string(cls, name: str): return cls() -class BaseDatetimeArray( - pandas_backports.OpsMixin, pandas_backports.NDArrayBackedExtensionArray -): +class BaseDatetimeArray(pandas_backports.OpsMixin, _mixins.NDArrayBackedExtensionArray): # scalar used to denote NA value inside our self._ndarray, e.g. -1 for # Categorical, iNaT for Period. Outside of object dtype, self.isna() should # be exactly locations in self._ndarray with _internal_fill_value. 
See: @@ -186,9 +185,6 @@ def median( keepdims: bool = False, skipna: bool = True, ): - if not hasattr(pandas_backports, "numpy_validate_median"): - raise NotImplementedError("Need pandas 1.3 or later to calculate median.") - pandas_backports.numpy_validate_median( (), {"out": out, "overwrite_input": overwrite_input, "keepdims": keepdims}, diff --git a/packages/db-dtypes/db_dtypes/pandas_backports.py b/packages/db-dtypes/db_dtypes/pandas_backports.py index f8009ea59dfc..378bb41708f7 100644 --- a/packages/db-dtypes/db_dtypes/pandas_backports.py +++ b/packages/db-dtypes/db_dtypes/pandas_backports.py @@ -19,18 +19,13 @@ the versions in the later versions of pandas. """ -from typing import Any - -import numpy import packaging.version import pandas -from pandas.api.types import is_integer import pandas.compat.numpy.function -import pandas.core.nanops pandas_release = packaging.version.parse(pandas.__version__).release -# Create aliases for private methods in case they move in a future version. +# # Create aliases for private methods in case they move in a future version. nanall = pandas.core.nanops.nanall nanany = pandas.core.nanops.nanany nanmax = pandas.core.nanops.nanmax @@ -40,9 +35,8 @@ numpy_validate_max = pandas.compat.numpy.function.validate_max numpy_validate_min = pandas.compat.numpy.function.validate_min -if pandas_release >= (1, 3): - nanmedian = pandas.core.nanops.nanmedian - numpy_validate_median = pandas.compat.numpy.function.validate_median +nanmedian = pandas.core.nanops.nanmedian +numpy_validate_median = pandas.compat.numpy.function.validate_median def import_default(module_name, force=False, default=None): @@ -78,83 +72,3 @@ def import_default(module_name, force=False, default=None): class OpsMixin: def _cmp_method(self, other, op): # pragma: NO COVER return NotImplemented - - -# TODO: use public API once pandas 1.5 / 2.x is released. 
-# See: https://github.com/pandas-dev/pandas/pull/45544 -@import_default("pandas.core.arrays._mixins", pandas_release < (1, 3)) -class NDArrayBackedExtensionArray(pandas.core.arrays.base.ExtensionArray): - def __init__(self, values, dtype): - assert isinstance(values, numpy.ndarray) - self._ndarray = values - self._dtype = dtype - - @classmethod - def _from_backing_data(cls, data): - return cls(data, data.dtype) - - def __getitem__(self, index): - value = self._ndarray[index] - if is_integer(index): - return self._box_func(value) - return self.__class__(value, self._dtype) - - def __setitem__(self, index, value): - self._ndarray[index] = self._validate_setitem_value(value) - - def __len__(self): - return len(self._ndarray) - - @property - def shape(self): - return self._ndarray.shape - - @property - def ndim(self) -> int: - return self._ndarray.ndim - - @property - def size(self) -> int: - return self._ndarray.size - - @property - def nbytes(self) -> int: - return self._ndarray.nbytes - - def copy(self): - return self[:] - - def repeat(self, n): - return self.__class__(self._ndarray.repeat(n), self._dtype) - - def take( - self, - indices, - *, - allow_fill: bool = False, - fill_value: Any = None, - axis: int = 0, - ): - from pandas.core.algorithms import take - - if allow_fill: - fill_value = self._validate_scalar(fill_value) - - new_data = take( - self._ndarray, - indices, - allow_fill=allow_fill, - fill_value=fill_value, - axis=axis, - ) - return self._from_backing_data(new_data) - - @classmethod - def _concat_same_type(cls, to_concat, axis=0): - dtypes = {str(x.dtype) for x in to_concat} - if len(dtypes) != 1: - raise ValueError("to_concat must have the same dtype (tz)", dtypes) - - new_values = [x._ndarray for x in to_concat] - new_values = numpy.concatenate(new_values, axis=axis) - return to_concat[0]._from_backing_data(new_values) # type: ignore[arg-type] diff --git a/packages/db-dtypes/db_dtypes/version.py b/packages/db-dtypes/db_dtypes/version.py index 
c97e3cac4ab7..d74293bf0d47 100644 --- a/packages/db-dtypes/db_dtypes/version.py +++ b/packages/db-dtypes/db_dtypes/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "1.4.2" +__version__ = "1.4.2" # pragma: NO COVER diff --git a/packages/db-dtypes/noxfile.py b/packages/db-dtypes/noxfile.py index b3c9450ff585..a2444ac9f54d 100644 --- a/packages/db-dtypes/noxfile.py +++ b/packages/db-dtypes/noxfile.py @@ -32,11 +32,9 @@ ISORT_VERSION = "isort==5.11.0" LINT_PATHS = ["docs", "db_dtypes", "tests", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.8" +DEFAULT_PYTHON_VERSION = "3.9" UNIT_TEST_PYTHON_VERSIONS: List[str] = [ - "3.7", - "3.8", "3.9", "3.10", "3.11", @@ -56,7 +54,7 @@ UNIT_TEST_EXTRAS: List[str] = [] UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} -SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8"] +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.9"] SYSTEM_TEST_STANDARD_DEPENDENCIES: List[str] = [ "mock", "pytest", @@ -88,7 +86,10 @@ nox.options.error_on_missing_interpreters = True -@nox.session(python=DEFAULT_PYTHON_VERSION) +# TODO: the linting process still uses python 3.8. +# As soon as that gets upgraded, we should be able to revert this session +# to using the DEFAULT_PYTHON_VERSION. +@nox.session(python="3.8") def lint(session): """Run linters. @@ -105,7 +106,11 @@ def lint(session): session.run("flake8", "db_dtypes", "tests") -@nox.session(python=DEFAULT_PYTHON_VERSION) +# TODO: the owlbot-python docker image still has python 3.8 installed ( +# and only 3.8). +# As soon as that gets upgraded, we should be able to revert this session +# to using the DEFAULT_PYTHON_VERSION. +@nox.session(python="3.8") def blacken(session): """Run black. Format code to uniform standard.""" session.install(BLACK_VERSION) @@ -137,7 +142,10 @@ def format(session): ) -@nox.session(python=DEFAULT_PYTHON_VERSION) +# TODO: the linting process still uses python 3.8. 
+# As soon as that gets upgraded, we should be able to revert this session +# to using the DEFAULT_PYTHON_VERSION. +@nox.session(python="3.8") def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" session.install("docutils", "pygments") diff --git a/packages/db-dtypes/owlbot.py b/packages/db-dtypes/owlbot.py index 18bd6238073f..04664d816049 100644 --- a/packages/db-dtypes/owlbot.py +++ b/packages/db-dtypes/owlbot.py @@ -28,7 +28,7 @@ # Add templated files # ---------------------------------------------------------------------------- templated_files = common.py_library( - system_test_python_versions=["3.8"], + system_test_python_versions=["3.9"], cov_level=100, intersphinx_dependencies={ "pandas": "https://pandas.pydata.org/pandas-docs/stable/" diff --git a/packages/db-dtypes/pytest.ini b/packages/db-dtypes/pytest.ini index c58342dda4bc..dbe13ba38624 100644 --- a/packages/db-dtypes/pytest.ini +++ b/packages/db-dtypes/pytest.ini @@ -2,13 +2,6 @@ filterwarnings = # treat all warnings as errors error - # Remove once support for python 3.7 and 3.8 is dropped - # Ignore warnings from older versions of pandas which still have python 3.7/3.8 support - ignore:.*distutils Version classes are deprecated:DeprecationWarning - ignore:.*resolve package from __spec__ or __package__, falling back on __name__ and __path__:ImportWarning - # Remove once https://github.com/dateutil/dateutil/issues/1314 is fixed - # dateutil is a dependency of pandas - ignore:datetime.datetime.utcfromtimestamp\(\) is deprecated:DeprecationWarning:dateutil.tz.tz # Remove once https://github.com/googleapis/python-db-dtypes-pandas/issues/227 is fixed ignore:.*any.*with datetime64 dtypes is deprecated and will raise in a future version:FutureWarning ignore:.*all.*with datetime64 dtypes is deprecated and will raise in a future version:FutureWarning diff --git a/packages/db-dtypes/samples/snippets/requirements-test.txt 
b/packages/db-dtypes/samples/snippets/requirements-test.txt index 57b712fe7294..2c78728ca5d7 100644 --- a/packages/db-dtypes/samples/snippets/requirements-test.txt +++ b/packages/db-dtypes/samples/snippets/requirements-test.txt @@ -1,2 +1 @@ -pytest===7.4.4; python_version == '3.7' # prevents dependabot from upgrading it -pytest==8.3.3; python_version > '3.7' +pytest==8.3.5 diff --git a/packages/db-dtypes/setup.py b/packages/db-dtypes/setup.py index 98bed9d4b3cd..80a69b6ab022 100644 --- a/packages/db-dtypes/setup.py +++ b/packages/db-dtypes/setup.py @@ -30,10 +30,10 @@ release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "packaging >= 17.0", - "pandas >= 1.2.0", - "pyarrow>=3.0.0", - "numpy >= 1.16.6", + "numpy >= 1.24.0", + "packaging >= 24.2.0", + "pandas >= 1.5.3", + "pyarrow >= 13.0.0", ] package_root = os.path.abspath(os.path.dirname(__file__)) @@ -63,8 +63,6 @@ def readme(): "License :: OSI Approved :: Apache Software License", "Programming Language :: Python", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", @@ -75,6 +73,6 @@ def readme(): ], platforms="Posix; MacOS X; Windows", install_requires=dependencies, - python_requires=">=3.7", + python_requires=">=3.9", tests_require=["pytest"], ) diff --git a/packages/db-dtypes/testing/constraints-3.7.txt b/packages/db-dtypes/testing/constraints-3.7.txt deleted file mode 100644 index a5c7a032e256..000000000000 --- a/packages/db-dtypes/testing/constraints-3.7.txt +++ /dev/null @@ -1,10 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List *all* library dependencies and extras in this file. -# Pin the version to the lower bound. 
-# -# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", -packaging==17.0 -pandas==1.2.0 -pyarrow==3.0.0 -numpy==1.16.6 diff --git a/packages/db-dtypes/testing/constraints-3.8.txt b/packages/db-dtypes/testing/constraints-3.8.txt deleted file mode 100644 index 2e7f3549a6f6..000000000000 --- a/packages/db-dtypes/testing/constraints-3.8.txt +++ /dev/null @@ -1,2 +0,0 @@ -# Make sure we test with pandas 1.2.0. The Python version isn't that relevant. -pandas==1.2.0 diff --git a/packages/db-dtypes/testing/constraints-3.9.txt b/packages/db-dtypes/testing/constraints-3.9.txt index afea9b0db5c1..1019c1cfb616 100644 --- a/packages/db-dtypes/testing/constraints-3.9.txt +++ b/packages/db-dtypes/testing/constraints-3.9.txt @@ -1,3 +1,5 @@ # Make sure we test with pandas 1.5.3. The Python version isn't that relevant. +numpy==1.24.0 +packaging==24.2.0 pandas==1.5.3 -numpy==1.24.0 \ No newline at end of file +pyarrow==13.0.0 diff --git a/packages/db-dtypes/tests/compliance/date/test_date_compliance.py b/packages/db-dtypes/tests/compliance/date/test_date_compliance.py index 038005a5d9bb..52b9c042b5fd 100644 --- a/packages/db-dtypes/tests/compliance/date/test_date_compliance.py +++ b/packages/db-dtypes/tests/compliance/date/test_date_compliance.py @@ -117,3 +117,9 @@ class TestReshaping(base.BaseReshapingTests): class TestSetitem(base.BaseSetitemTests): pass + + +# NDArrayBacked2DTests suite added in https://github.com/pandas-dev/pandas/pull/44974 +# v1.4.0rc0 +class Test2DCompat(base.NDArrayBacked2DTests): + pass diff --git a/packages/db-dtypes/tests/compliance/date/test_date_compliance_1_5.py b/packages/db-dtypes/tests/compliance/date/test_date_compliance_1_5.py deleted file mode 100644 index e8f2c93fce87..000000000000 --- a/packages/db-dtypes/tests/compliance/date/test_date_compliance_1_5.py +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the 
License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -""" -Tests for extension interface compliance, inherited from pandas. - -See: -https://github.com/pandas-dev/pandas/blob/main/pandas/tests/extension/decimal/test_decimal.py -and -https://github.com/pandas-dev/pandas/blob/main/pandas/tests/extension/test_period.py -""" - -from pandas.tests.extension import base -import pytest - -# NDArrayBacked2DTests suite added in https://github.com/pandas-dev/pandas/pull/44974 -pytest.importorskip("pandas", minversion="1.5.0dev") - - -class Test2DCompat(base.NDArrayBacked2DTests): - pass diff --git a/packages/db-dtypes/tests/compliance/time/test_time_compliance.py b/packages/db-dtypes/tests/compliance/time/test_time_compliance.py index f894ba5497c7..118c61dfe27f 100644 --- a/packages/db-dtypes/tests/compliance/time/test_time_compliance.py +++ b/packages/db-dtypes/tests/compliance/time/test_time_compliance.py @@ -34,6 +34,11 @@ # compliance tests for reduction operations. 
+# NDArrayBacked2DTests suite added in https://github.com/pandas-dev/pandas/pull/44974 +class Test2DCompat(base.NDArrayBacked2DTests): + pass + + class TestComparisonOps(base.BaseComparisonOpsTests): pass diff --git a/packages/db-dtypes/tests/compliance/time/test_time_compliance_1_5.py b/packages/db-dtypes/tests/compliance/time/test_time_compliance_1_5.py deleted file mode 100644 index e8f2c93fce87..000000000000 --- a/packages/db-dtypes/tests/compliance/time/test_time_compliance_1_5.py +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -""" -Tests for extension interface compliance, inherited from pandas. 
- -See: -https://github.com/pandas-dev/pandas/blob/main/pandas/tests/extension/decimal/test_decimal.py -and -https://github.com/pandas-dev/pandas/blob/main/pandas/tests/extension/test_period.py -""" - -from pandas.tests.extension import base -import pytest - -# NDArrayBacked2DTests suite added in https://github.com/pandas-dev/pandas/pull/44974 -pytest.importorskip("pandas", minversion="1.5.0dev") - - -class Test2DCompat(base.NDArrayBacked2DTests): - pass diff --git a/packages/db-dtypes/tests/unit/test_dtypes.py b/packages/db-dtypes/tests/unit/test_dtypes.py index 87b6a920b7c1..381a58027d4c 100644 --- a/packages/db-dtypes/tests/unit/test_dtypes.py +++ b/packages/db-dtypes/tests/unit/test_dtypes.py @@ -14,14 +14,11 @@ import datetime -import packaging.version import pytest pd = pytest.importorskip("pandas") np = pytest.importorskip("numpy") -pandas_release = packaging.version.parse(pd.__version__).release - SAMPLE_RAW_VALUES = dict( dbdate=(datetime.date(2021, 2, 2), "2021-2-3", pd.NaT), dbtime=(datetime.time(1, 2, 2), "1:2:3.5", pd.NaT), @@ -538,39 +535,37 @@ def test_min_max_median(dtype): a = cls(data) assert a.min() == sample_values[0] assert a.max() == sample_values[-1] - if pandas_release >= (1, 3): - assert ( - a.median() == datetime.time(1, 2, 4) - if dtype == "dbtime" - else datetime.date(2021, 2, 3) - ) + + assert ( + a.median() == datetime.time(1, 2, 4) + if dtype == "dbtime" + else datetime.date(2021, 2, 3) + ) empty = cls([]) assert empty.min() is pd.NaT assert empty.max() is pd.NaT - if pandas_release >= (1, 3): - assert empty.median() is pd.NaT + assert empty.median() is pd.NaT empty = cls([None]) assert empty.min() is pd.NaT assert empty.max() is pd.NaT assert empty.min(skipna=False) is pd.NaT assert empty.max(skipna=False) is pd.NaT - if pandas_release >= (1, 3): - with pytest.warns(RuntimeWarning, match="empty slice"): - # It's weird that we get the warning here, and not - # below. 
:/ - assert empty.median() is pd.NaT - assert empty.median(skipna=False) is pd.NaT + + with pytest.warns(RuntimeWarning, match="empty slice"): + # It's weird that we get the warning here, and not + # below. :/ + assert empty.median() is pd.NaT + assert empty.median(skipna=False) is pd.NaT a = _make_one(dtype) assert a.min() == sample_values[0] assert a.max() == sample_values[1] - if pandas_release >= (1, 3): - assert ( - a.median() == datetime.time(1, 2, 2, 750000) - if dtype == "dbtime" - else datetime.date(2021, 2, 2) - ) + assert ( + a.median() == datetime.time(1, 2, 2, 750000) + if dtype == "dbtime" + else datetime.date(2021, 2, 2) + ) def test_date_add(): diff --git a/packages/db-dtypes/tests/unit/test_json.py b/packages/db-dtypes/tests/unit/test_json.py index d15cfc768011..6b1aaa627138 100644 --- a/packages/db-dtypes/tests/unit/test_json.py +++ b/packages/db-dtypes/tests/unit/test_json.py @@ -20,6 +20,8 @@ import pytest import db_dtypes +import db_dtypes.json + # Check for minimum Pandas version. pytest.importorskip("pandas", minversion="1.5.0") diff --git a/packages/db-dtypes/tests/unit/test_pandas_backports.py b/packages/db-dtypes/tests/unit/test_pandas_backports.py index eb68b6ad2542..cb783045553c 100644 --- a/packages/db-dtypes/tests/unit/test_pandas_backports.py +++ b/packages/db-dtypes/tests/unit/test_pandas_backports.py @@ -35,3 +35,19 @@ def test_import_default_module_not_found(mock_import): default_class = type("OpsMixin", (), {}) # Dummy class result = pandas_backports.import_default("module_name", default=default_class) assert result == default_class + + +@mock.patch("builtins.__import__") +def test_import_default_force_true(mock_import): + """ + Test that when force=True, the default is returned immediately + without attempting an import. 
+ """ + default_class = type("ForcedMixin", (), {}) # A dummy class + + result = pandas_backports.import_default( + "any_module_name", force=True, default=default_class + ) + + # Assert that the returned value is the default class itself + assert result is default_class From 75ec6ded5e8afe87b5d3f9f6c0ba8bf01a500f9c Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 12 May 2025 09:44:58 -0400 Subject: [PATCH 197/210] chore(main): release 1.4.3 (#332) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/db-dtypes/CHANGELOG.md | 14 ++++++++++++++ packages/db-dtypes/db_dtypes/version.py | 2 +- 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/packages/db-dtypes/CHANGELOG.md b/packages/db-dtypes/CHANGELOG.md index 19fe645618a4..836fe08b2c1f 100644 --- a/packages/db-dtypes/CHANGELOG.md +++ b/packages/db-dtypes/CHANGELOG.md @@ -1,5 +1,19 @@ # Changelog +## [1.4.3](https://github.com/googleapis/python-db-dtypes-pandas/compare/v1.4.2...v1.4.3) (2025-05-12) + + +### Bug Fixes + +* Remove setup.cfg configuration for creating universal wheels ([#333](https://github.com/googleapis/python-db-dtypes-pandas/issues/333)) ([44a5aa2](https://github.com/googleapis/python-db-dtypes-pandas/commit/44a5aa24ef3219849e9e0bedeeaebc4b9ee53d9e)) +* Updates noxfile with pip freeze and pandas 1.2 ([#331](https://github.com/googleapis/python-db-dtypes-pandas/issues/331)) ([5eb2a85](https://github.com/googleapis/python-db-dtypes-pandas/commit/5eb2a85acc8292e00eea994e5e5ab54d7d037fda)) + + +### Dependencies + +* Drop support for Python 3.7 and 3.8 ([66f3f0b](https://github.com/googleapis/python-db-dtypes-pandas/commit/66f3f0bed9ed0aa317519f039e8623b0715af38e)) +* Updates deprecation warning to FutureWarning re: 3.7 and 3.8 ([#338](https://github.com/googleapis/python-db-dtypes-pandas/issues/338)) 
([158243b](https://github.com/googleapis/python-db-dtypes-pandas/commit/158243b2e039db108cec3fa395d3391161042c4e)) + ## [1.4.2](https://github.com/googleapis/python-db-dtypes-pandas/compare/v1.4.1...v1.4.2) (2025-03-04) diff --git a/packages/db-dtypes/db_dtypes/version.py b/packages/db-dtypes/db_dtypes/version.py index d74293bf0d47..6bc364f2f543 100644 --- a/packages/db-dtypes/db_dtypes/version.py +++ b/packages/db-dtypes/db_dtypes/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "1.4.2" # pragma: NO COVER +__version__ = "1.4.3" # pragma: NO COVER From 65becdca2874619e7c0609b00a46c9fa36cd409c Mon Sep 17 00:00:00 2001 From: Chalmer Lowe Date: Thu, 12 Jun 2025 14:41:15 -0400 Subject: [PATCH 198/210] test: updates tests from pandas with compliance issues (#365) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * test: updates tests with regex matches * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * updates to np.array processing * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * update imports * update docstrings * update docstrings --------- Co-authored-by: Owl Bot --- .../compliance/date/test_date_compliance.py | 59 ++++++++++++++- .../compliance/json/test_json_compliance.py | 73 ++++++++++++++++++- .../compliance/time/test_time_compliance.py | 59 ++++++++++++++- 3 files changed, 184 insertions(+), 7 deletions(-) diff --git a/packages/db-dtypes/tests/compliance/date/test_date_compliance.py b/packages/db-dtypes/tests/compliance/date/test_date_compliance.py index 52b9c042b5fd..ae1ef83d2a41 100644 --- a/packages/db-dtypes/tests/compliance/date/test_date_compliance.py +++ b/packages/db-dtypes/tests/compliance/date/test_date_compliance.py @@ -51,7 +51,15 @@ class 
TestDtype(base.BaseDtypeTests): class TestGetitem(base.BaseGetitemTests): - pass + def test_take_pandas_style_negative_raises(self, data, na_value): + # This test was failing compliance checks because it attempted to match + # a pytest regex match using an empty string (""), which pytest version + # 8.4.0 stopped allowing. + # The test has been updated in pandas main so that it will + # no longer fail, but the fix is not expected to be released until + # at least pandas version 3.0 (current version is 2.3). + with pytest.raises(ValueError): + data.take([0, -2], fill_value=na_value, allow_fill=True) class TestGroupby(base.BaseGroupbyTests): @@ -63,7 +71,26 @@ class TestIndex(base.BaseIndexTests): class TestInterface(base.BaseInterfaceTests): - pass + def test_array_interface_copy(self, data): + # This test was failing compliance checks due to changes in how + # numpy handles processing when np.array(obj, copy=False). + # Until pandas changes the existing tests, this compliance test + # will continue to fail. + import numpy as np + from pandas.compat.numpy import np_version_gt2 + + result_copy1 = np.array(data, copy=True) + result_copy2 = np.array(data, copy=True) + assert not np.may_share_memory(result_copy1, result_copy2) + + if not np_version_gt2: + # copy=False semantics are only supported in NumPy>=2. + return + + with pytest.raises(ValueError): + result_nocopy1 = np.array(data, copy=False) + result_nocopy2 = np.array(data, copy=False) + assert np.may_share_memory(result_nocopy1, result_nocopy2) class TestMissing(base.BaseMissingTests): @@ -102,6 +129,21 @@ def test_hash_pandas_object(self): further investigation. See issues 182, 183, 185.""" ) + def test_argmax_argmin_no_skipna_notimplemented(self, data_missing_for_sorting): + # This test was failing compliance checks because it attempted to match + # a pytest regex match using an empty string (""), which pytest version + # 8.4.0 stopped allowing. 
+ # The test has been updated in pandas main so that it will + # no longer fail, but the fix is not expected to be released until + # at least pandas version 3.0 (current version is 2.3) + data = data_missing_for_sorting + + with pytest.raises(NotImplementedError): + data.argmin(skipna=False) + + with pytest.raises(NotImplementedError): + data.argmax(skipna=False) + class TestParsing(base.BaseParsingTests): pass @@ -116,7 +158,18 @@ class TestReshaping(base.BaseReshapingTests): class TestSetitem(base.BaseSetitemTests): - pass + # This test was failing compliance checks because it attempted to match + # a pytest regex match using an empty string (""), which pytest version + # 8.4.0 stopped allowing. + # The test has been updated in pandas main so that it will + # no longer fail, but the fix is not expected to be released until + # at least pandas version 3.0 (current version is 2.3). + def test_setitem_invalid(self, data, invalid_scalar): + with pytest.raises((ValueError, TypeError)): + data[0] = invalid_scalar + + with pytest.raises((ValueError, TypeError)): + data[:] = invalid_scalar # NDArrayBacked2DTests suite added in https://github.com/pandas-dev/pandas/pull/44974 diff --git a/packages/db-dtypes/tests/compliance/json/test_json_compliance.py b/packages/db-dtypes/tests/compliance/json/test_json_compliance.py index 9a0d0efbc877..da5b63b2987e 100644 --- a/packages/db-dtypes/tests/compliance/json/test_json_compliance.py +++ b/packages/db-dtypes/tests/compliance/json/test_json_compliance.py @@ -107,6 +107,16 @@ def test_getitem_scalar(self, data): """ super().test_getitem_scalar(data) + def test_take_pandas_style_negative_raises(self, data, na_value): + # This test was failing compliance checks because it attempted to match + # a pytest regex match using an empty string (""), which pytest version + # 8.4.0 stopped allowing. 
+ # The test has been updated in pandas main so that it will + # no longer fail, but the fix is not expected to be released until + # at least pandas version 3.0 (current version is 2.3). + with pytest.raises(ValueError): + data.take([0, -2], fill_value=na_value, allow_fill=True) + class TestJSONArrayIndex(base.BaseIndexTests): pass @@ -133,6 +143,26 @@ def test_array_interface(self, data): def test_view(self, data): super().test_view(data) + def test_array_interface_copy(self, data): + # This test was failing compliance checks due to changes in how + # numpy handles processing when np.array(obj, copy=False). + # Until pandas changes the existing tests, this compliance test + # will continue to fail. + import numpy as np + from pandas.compat.numpy import np_version_gt2 + + result_copy1 = np.array(data, copy=True) + result_copy2 = np.array(data, copy=True) + assert not np.may_share_memory(result_copy1, result_copy2) + + if not np_version_gt2: + # copy=False semantics are only supported in NumPy>=2. + return + + result_nocopy1 = np.array(data, copy=False) + result_nocopy2 = np.array(data, copy=False) + assert not np.may_share_memory(result_nocopy1, result_nocopy2) + class TestJSONArrayParsing(base.BaseParsingTests): @pytest.mark.xfail(reason="data type 'json' not understood") @@ -190,6 +220,21 @@ def test_sort_values(self, data_for_sorting): def test_sort_values_frame(self, data_for_sorting): super().test_sort_values_frame(data_for_sorting) + def test_argmax_argmin_no_skipna_notimplemented(self, data_missing_for_sorting): + # This test was failing compliance checks because it attempted to match + # a pytest regex match using an empty string (""), which pytest version + # 8.4.0 stopped allowing. 
+ # The test has been updated in pandas main so that it will + # no longer fail, but the fix is not expected to be released until + # at least pandas version 3.0 (current version is 2.3) + data = data_missing_for_sorting + + with pytest.raises(NotImplementedError): + data.argmin(skipna=False) + + with pytest.raises(NotImplementedError): + data.argmax(skipna=False) + class TestJSONArrayMissing(base.BaseMissingTests): @pytest.mark.xfail(reason="Setting a dict as a scalar") @@ -239,7 +284,20 @@ class TestJSONArrayPrinting(base.BasePrintingTests): class TestJSONArrayReduce(base.BaseReduceTests): - pass + @pytest.mark.filterwarnings("ignore::RuntimeWarning") + @pytest.mark.parametrize("skipna", [True, False]) + def test_reduce_series_numeric(self, data, all_numeric_reductions, skipna): + op_name = all_numeric_reductions + ser = pd.Series(data) + + if not self._supports_reduction(ser, op_name): + # Sum does not raise an Error (TypeError or otherwise) + if op_name != "sum": + with pytest.raises(TypeError): + getattr(ser, op_name)(skipna=skipna) + else: + # min/max with empty produce numpy warnings + self.check_reduce(ser, op_name, skipna) class TestJSONArrayReshaping(base.BaseReshapingTests): @@ -356,6 +414,19 @@ def test_setitem_mask_boolean_array_with_na(self, data, box_in_series): def test_setitem_preserves_views(self, data): super().test_setitem_preserves_views(data) + def test_setitem_invalid(self, data, invalid_scalar): + # This test was failing compliance checks because it attempted to match + # a pytest regex match using an empty string (""), which pytest version + # 8.4.0 stopped allowing. 
+ # The test has been updated in pandas main so that it will + # no longer fail, but the fix is not expected to be released until + # at least pandas version 3.0 (current version is 2.3) + with pytest.raises((ValueError, TypeError)): + data[0] = invalid_scalar + + with pytest.raises((ValueError, TypeError)): + data[:] = invalid_scalar + class TestJSONArrayDim2Compat(base.Dim2CompatTests): pass diff --git a/packages/db-dtypes/tests/compliance/time/test_time_compliance.py b/packages/db-dtypes/tests/compliance/time/test_time_compliance.py index 118c61dfe27f..99ac5dd99355 100644 --- a/packages/db-dtypes/tests/compliance/time/test_time_compliance.py +++ b/packages/db-dtypes/tests/compliance/time/test_time_compliance.py @@ -56,7 +56,15 @@ class TestDtype(base.BaseDtypeTests): class TestGetitem(base.BaseGetitemTests): - pass + def test_take_pandas_style_negative_raises(self, data, na_value): + # This test was failing compliance checks because it attempted to match + # a pytest regex match using an empty string (""), which pytest version + # 8.4.0 stopped allowing. + # The test has been updated in pandas main so that it will + # no longer fail, but the fix is not expected to be released until + # at least pandas version 3.0 (current version is 2.3). + with pytest.raises(ValueError): + data.take([0, -2], fill_value=na_value, allow_fill=True) class TestGroupby(base.BaseGroupbyTests): @@ -68,7 +76,26 @@ class TestIndex(base.BaseIndexTests): class TestInterface(base.BaseInterfaceTests): - pass + def test_array_interface_copy(self, data): + # This test was failing compliance checks due to changes in how + # numpy handles processing when np.array(obj, copy=False). + # Until pandas changes the existing tests, this compliance test + # will continue to fail. 
+ import numpy as np + from pandas.compat.numpy import np_version_gt2 + + result_copy1 = np.array(data, copy=True) + result_copy2 = np.array(data, copy=True) + assert not np.may_share_memory(result_copy1, result_copy2) + + if not np_version_gt2: + # copy=False semantics are only supported in NumPy>=2. + return + + with pytest.raises(ValueError): + result_nocopy1 = np.array(data, copy=False) + result_nocopy2 = np.array(data, copy=False) + assert np.may_share_memory(result_nocopy1, result_nocopy2) class TestMissing(base.BaseMissingTests): @@ -95,6 +122,21 @@ def test_value_counts(self, all_data, dropna): tm.assert_series_equal(result, expected) + def test_argmax_argmin_no_skipna_notimplemented(self, data_missing_for_sorting): + # This test was failing compliance checks because it attempted to match + # a pytest regex match using an empty string (""), which pytest version + # 8.4.0 stopped allowing. + # The test has been updated in pandas main so that it will + # no longer fail, but the fix is not expected to be released until + # at least pandas version 3.0 (current version is 2.3) + data = data_missing_for_sorting + + with pytest.raises(NotImplementedError): + data.argmin(skipna=False) + + with pytest.raises(NotImplementedError): + data.argmax(skipna=False) + class TestParsing(base.BaseParsingTests): pass @@ -109,4 +151,15 @@ class TestReshaping(base.BaseReshapingTests): class TestSetitem(base.BaseSetitemTests): - pass + def test_setitem_invalid(self, data, invalid_scalar): + # This test was failing compliance checks because it attempted to match + # a pytest regex match using an empty string (""), which pytest version + # 8.4.0 stopped allowing. 
+ # The test has been updated in pandas main so that it will + # no longer fail, but the fix is not expected to be released until + # at least pandas version 3.0 (current version is 2.3) + with pytest.raises((ValueError, TypeError)): + data[0] = invalid_scalar + + with pytest.raises((ValueError, TypeError)): + data[:] = invalid_scalar From 0932043a04f5e460bff4cb85bb25d3e3779ddcad Mon Sep 17 00:00:00 2001 From: Chalmer Lowe Date: Fri, 13 Jun 2025 12:05:12 -0400 Subject: [PATCH 199/210] test: updates python versions and sets owlbot excludes (#367) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * updates to prevent changes to docs and lint.ymls * updates lint python version to 3.10 * remove comments, update lint python version * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Mend Renovate Co-authored-by: Owl Bot --- packages/db-dtypes/.github/workflows/lint.yml | 2 +- packages/db-dtypes/noxfile.py | 11 +++-------- packages/db-dtypes/owlbot.py | 12 +++++++++++- .../db-dtypes/samples/snippets/requirements-test.txt | 2 +- 4 files changed, 16 insertions(+), 11 deletions(-) diff --git a/packages/db-dtypes/.github/workflows/lint.yml b/packages/db-dtypes/.github/workflows/lint.yml index 4866193af2a9..1051da0bdda4 100644 --- a/packages/db-dtypes/.github/workflows/lint.yml +++ b/packages/db-dtypes/.github/workflows/lint.yml @@ -12,7 +12,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v5 with: - python-version: "3.8" + python-version: "3.10" - name: Install nox run: | python -m pip install --upgrade setuptools pip wheel diff --git a/packages/db-dtypes/noxfile.py b/packages/db-dtypes/noxfile.py index a2444ac9f54d..125d4f1c8a42 100644 --- a/packages/db-dtypes/noxfile.py +++ b/packages/db-dtypes/noxfile.py @@ -33,6 +33,7 @@ LINT_PATHS = ["docs", "db_dtypes", "tests", "noxfile.py", 
"setup.py"] DEFAULT_PYTHON_VERSION = "3.9" +LINT_PYTHON_VERSION = "3.10" UNIT_TEST_PYTHON_VERSIONS: List[str] = [ "3.9", @@ -86,10 +87,7 @@ nox.options.error_on_missing_interpreters = True -# TODO: the linting process still uses python 3.8. -# As soon as that gets upgraded, we should be able to revert this session -# to using the DEFAULT_PYTHON_VERSION. -@nox.session(python="3.8") +@nox.session(python=LINT_PYTHON_VERSION) def lint(session): """Run linters. @@ -142,10 +140,7 @@ def format(session): ) -# TODO: the linting process still uses python 3.8. -# As soon as that gets upgraded, we should be able to revert this session -# to using the DEFAULT_PYTHON_VERSION. -@nox.session(python="3.8") +@nox.session(python=LINT_PYTHON_VERSION) def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" session.install("docutils", "pygments") diff --git a/packages/db-dtypes/owlbot.py b/packages/db-dtypes/owlbot.py index 04664d816049..93572a0b2680 100644 --- a/packages/db-dtypes/owlbot.py +++ b/packages/db-dtypes/owlbot.py @@ -34,7 +34,17 @@ "pandas": "https://pandas.pydata.org/pandas-docs/stable/" }, ) -s.move(templated_files, excludes=["docs/multiprocessing.rst", "README.rst", ".github/workflows/unittest.yml", "noxfile.py"]) +s.move( + templated_files, + excludes=[ + "docs/multiprocessing.rst", + "README.rst", + ".github/workflows/unittest.yml", + ".github/workflows/docs.yml", # to avoid overwriting python version + ".github/workflows/lint.yml", # to avoid overwriting python version + "noxfile.py", + ] +) # ---------------------------------------------------------------------------- # Fixup files diff --git a/packages/db-dtypes/samples/snippets/requirements-test.txt b/packages/db-dtypes/samples/snippets/requirements-test.txt index 2c78728ca5d7..69d461f3fc4c 100644 --- a/packages/db-dtypes/samples/snippets/requirements-test.txt +++ b/packages/db-dtypes/samples/snippets/requirements-test.txt @@ -1 +1 @@ -pytest==8.3.5 +pytest==8.4.0 From 
74914f0889055a1064342fc64cdbbc7fe0c06074 Mon Sep 17 00:00:00 2001 From: Chalmer Lowe Date: Wed, 18 Jun 2025 13:50:16 -0400 Subject: [PATCH 200/210] chore: ignore docs.yml and lint.yml in renovate (#369) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: ignore docs.yml and lint.yml in renovate This change updates renovate.json to prevent renovate-bot from updating the specified workflow files. * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: exclude renovate.json from owlbot updates This ensures that owlbot does not overwrite the changes made to `renovate.json`. * Update owlbot.py * Update owlbot.py --------- Co-authored-by: google-labs-jules[bot] <161369871+google-labs-jules[bot]@users.noreply.github.com> Co-authored-by: Owl Bot --- packages/db-dtypes/owlbot.py | 4 ++++ packages/db-dtypes/renovate.json | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/packages/db-dtypes/owlbot.py b/packages/db-dtypes/owlbot.py index 93572a0b2680..860632482078 100644 --- a/packages/db-dtypes/owlbot.py +++ b/packages/db-dtypes/owlbot.py @@ -43,6 +43,10 @@ ".github/workflows/docs.yml", # to avoid overwriting python version ".github/workflows/lint.yml", # to avoid overwriting python version "noxfile.py", + "renovate.json", # to avoid overwriting the ignorePaths list additions: + # ".github/workflows/docs.yml AND lint.yml" specifically + # the version of python referenced in each of those files. + # Currently renovate bot wants to change 3.10 to 3.13. 
] ) diff --git a/packages/db-dtypes/renovate.json b/packages/db-dtypes/renovate.json index c7875c469bd5..9d9a6d0bb8cc 100644 --- a/packages/db-dtypes/renovate.json +++ b/packages/db-dtypes/renovate.json @@ -5,7 +5,7 @@ ":preserveSemverRanges", ":disableDependencyDashboard" ], - "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py", ".github/workflows/unittest.yml"], + "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py", ".github/workflows/unittest.yml", ".github/workflows/docs.yml", ".github/workflows/lint.yml"], "pip_requirements": { "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] } From 3f3a853131e9e8da1c7698ff127c96c91daeaadc Mon Sep 17 00:00:00 2001 From: Chalmer Lowe Date: Tue, 8 Jul 2025 13:20:07 -0400 Subject: [PATCH 201/210] deps: adds limits to numpy installs under 3.10 for dataproc (#370) * deps: adds limits to numpy installs under 3.10 for dataproc * tweaks some version numbers as a test --- packages/db-dtypes/setup.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/db-dtypes/setup.py b/packages/db-dtypes/setup.py index 80a69b6ab022..093bf2ed942f 100644 --- a/packages/db-dtypes/setup.py +++ b/packages/db-dtypes/setup.py @@ -30,7 +30,8 @@ release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "numpy >= 1.24.0", + "numpy >= 1.24.0, <= 2.2.6 ; python_version == '3.10'", + "numpy >= 1.24.0 ; python_version != '3.10'", "packaging >= 24.2.0", "pandas >= 1.5.3", "pyarrow >= 13.0.0", From e8465e2779b648c93529454930daac9323d33afa Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 1 Aug 2025 19:28:05 +0200 Subject: [PATCH 202/210] chore(deps): update dependency pytest to v8.4.1 (#368) * chore(deps): update all dependencies * Update docs.yml * Update lint.yml --------- Co-authored-by: Lingqing Gan --- packages/db-dtypes/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/packages/db-dtypes/samples/snippets/requirements-test.txt b/packages/db-dtypes/samples/snippets/requirements-test.txt index 69d461f3fc4c..8e0e8841f28c 100644 --- a/packages/db-dtypes/samples/snippets/requirements-test.txt +++ b/packages/db-dtypes/samples/snippets/requirements-test.txt @@ -1 +1 @@ -pytest==8.4.0 +pytest==8.4.1 From 781cb5b87653cf17ee8d5fed10624f673bf4a29e Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 11 Aug 2025 22:45:35 +0200 Subject: [PATCH 203/210] chore(deps): update actions/checkout action to v5 (#372) --- packages/db-dtypes/.github/workflows/compliance.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/db-dtypes/.github/workflows/compliance.yml b/packages/db-dtypes/.github/workflows/compliance.yml index 03695095940b..2d10094e9281 100644 --- a/packages/db-dtypes/.github/workflows/compliance.yml +++ b/packages/db-dtypes/.github/workflows/compliance.yml @@ -11,7 +11,7 @@ jobs: python: ['3.13'] steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@v5 - name: Setup Python uses: actions/setup-python@v5 with: @@ -32,7 +32,7 @@ jobs: python: ['3.13'] steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@v5 - name: Setup Python uses: actions/setup-python@v5 with: From 8ae44757f5ee6b5a5c60efa32c49cdf530bd169f Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 8 Sep 2025 22:59:06 +0200 Subject: [PATCH 204/210] chore(deps): update all dependencies (#373) --- packages/db-dtypes/.github/workflows/compliance.yml | 4 ++-- packages/db-dtypes/samples/snippets/requirements-test.txt | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/db-dtypes/.github/workflows/compliance.yml b/packages/db-dtypes/.github/workflows/compliance.yml index 2d10094e9281..39b9ae7a98ca 100644 --- a/packages/db-dtypes/.github/workflows/compliance.yml +++ b/packages/db-dtypes/.github/workflows/compliance.yml @@ -13,7 +13,7 @@ jobs: 
- name: Checkout uses: actions/checkout@v5 - name: Setup Python - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: ${{ matrix.python }} - name: Install nox @@ -34,7 +34,7 @@ jobs: - name: Checkout uses: actions/checkout@v5 - name: Setup Python - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: ${{ matrix.python }} - name: Install nox diff --git a/packages/db-dtypes/samples/snippets/requirements-test.txt b/packages/db-dtypes/samples/snippets/requirements-test.txt index 8e0e8841f28c..9471b3d92fbb 100644 --- a/packages/db-dtypes/samples/snippets/requirements-test.txt +++ b/packages/db-dtypes/samples/snippets/requirements-test.txt @@ -1 +1 @@ -pytest==8.4.1 +pytest==8.4.2 From 5b9f4e4d33635a629d4643c8e7edaa1da8f43fe6 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 11 Nov 2025 12:13:04 -0500 Subject: [PATCH 205/210] chore(main): release 1.4.4 (#371) :robot: I have created a release *beep* *boop* --- ## [1.4.4](https://github.com/googleapis/python-db-dtypes-pandas/compare/v1.4.3...v1.4.4) (2025-09-08) ### Dependencies * Adds limits to numpy installs under 3.10 for dataproc ([#370](https://github.com/googleapis/python-db-dtypes-pandas/issues/370)) ([6d79280](https://github.com/googleapis/python-db-dtypes-pandas/commit/6d79280f54b662accd80a571412ac0fcae206abb)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). 
Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/db-dtypes/CHANGELOG.md | 7 +++++++ packages/db-dtypes/db_dtypes/version.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/db-dtypes/CHANGELOG.md b/packages/db-dtypes/CHANGELOG.md index 836fe08b2c1f..159a09534a48 100644 --- a/packages/db-dtypes/CHANGELOG.md +++ b/packages/db-dtypes/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.4.4](https://github.com/googleapis/python-db-dtypes-pandas/compare/v1.4.3...v1.4.4) (2025-09-08) + + +### Dependencies + +* Adds limits to numpy installs under 3.10 for dataproc ([#370](https://github.com/googleapis/python-db-dtypes-pandas/issues/370)) ([6d79280](https://github.com/googleapis/python-db-dtypes-pandas/commit/6d79280f54b662accd80a571412ac0fcae206abb)) + ## [1.4.3](https://github.com/googleapis/python-db-dtypes-pandas/compare/v1.4.2...v1.4.3) (2025-05-12) diff --git a/packages/db-dtypes/db_dtypes/version.py b/packages/db-dtypes/db_dtypes/version.py index 6bc364f2f543..371002800d4a 100644 --- a/packages/db-dtypes/db_dtypes/version.py +++ b/packages/db-dtypes/db_dtypes/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "1.4.3" # pragma: NO COVER +__version__ = "1.4.4" # pragma: NO COVER From a261e832bb1b243197387703dcf31ff18a74dd16 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 11 Nov 2025 13:14:53 -0500 Subject: [PATCH 206/210] chore(librarian): onboard to librarian (#374) Towards https://github.com/googleapis/librarian/issues/2456 --- packages/db-dtypes/.github/.OwlBot.lock.yaml | 17 ----- packages/db-dtypes/.github/.OwlBot.yaml | 18 ----- packages/db-dtypes/.github/auto-approve.yml | 3 - packages/db-dtypes/.github/release-please.yml | 2 - .../db-dtypes/.github/release-trigger.yml | 2 - .../db-dtypes/.github/sync-repo-settings.yaml | 34 --------- packages/db-dtypes/.librarian/state.yaml | 10 +++ packages/db-dtypes/CHANGELOG.md | 4 + packages/db-dtypes/owlbot.py | 73 ------------------- 9 files changed, 14 insertions(+), 149 deletions(-) delete mode 100644 packages/db-dtypes/.github/.OwlBot.lock.yaml delete mode 100644 packages/db-dtypes/.github/.OwlBot.yaml delete mode 100644 packages/db-dtypes/.github/auto-approve.yml delete mode 100644 packages/db-dtypes/.github/release-please.yml delete mode 100644 packages/db-dtypes/.github/release-trigger.yml delete mode 100644 packages/db-dtypes/.github/sync-repo-settings.yaml create mode 100644 packages/db-dtypes/.librarian/state.yaml delete mode 100644 packages/db-dtypes/owlbot.py diff --git a/packages/db-dtypes/.github/.OwlBot.lock.yaml b/packages/db-dtypes/.github/.OwlBot.lock.yaml deleted file mode 100644 index c631e1f7d7e9..000000000000 --- a/packages/db-dtypes/.github/.OwlBot.lock.yaml +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -docker: - image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:5581906b957284864632cde4e9c51d1cc66b0094990b27e689132fe5cd036046 -# created: 2025-03-05 diff --git a/packages/db-dtypes/.github/.OwlBot.yaml b/packages/db-dtypes/.github/.OwlBot.yaml deleted file mode 100644 index 57184d996cbd..000000000000 --- a/packages/db-dtypes/.github/.OwlBot.yaml +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright 2021 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -docker: - image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - -begin-after-commit-hash: be22498ce258bf2d5fe12fd696d3ad9a2b6c430e diff --git a/packages/db-dtypes/.github/auto-approve.yml b/packages/db-dtypes/.github/auto-approve.yml deleted file mode 100644 index 311ebbb853a9..000000000000 --- a/packages/db-dtypes/.github/auto-approve.yml +++ /dev/null @@ -1,3 +0,0 @@ -# https://github.com/googleapis/repo-automation-bots/tree/main/packages/auto-approve -processes: - - "OwlBotTemplateChanges" diff --git a/packages/db-dtypes/.github/release-please.yml b/packages/db-dtypes/.github/release-please.yml deleted file mode 100644 index 466597e5b196..000000000000 --- a/packages/db-dtypes/.github/release-please.yml +++ /dev/null @@ -1,2 +0,0 @@ -releaseType: python -handleGHRelease: true diff --git a/packages/db-dtypes/.github/release-trigger.yml b/packages/db-dtypes/.github/release-trigger.yml deleted file mode 100644 index 134780d1ab6f..000000000000 --- a/packages/db-dtypes/.github/release-trigger.yml +++ /dev/null @@ -1,2 +0,0 @@ -enabled: true -multiScmName: python-db-dtypes-pandas diff --git a/packages/db-dtypes/.github/sync-repo-settings.yaml b/packages/db-dtypes/.github/sync-repo-settings.yaml deleted file mode 100644 index 55cd410a768a..000000000000 --- a/packages/db-dtypes/.github/sync-repo-settings.yaml +++ /dev/null @@ -1,34 +0,0 @@ -# https://github.com/googleapis/repo-automation-bots/tree/main/packages/sync-repo-settings -# Rules for main branch protection -branchProtectionRules: -# Identifies the protection rule pattern. Name of the branch to be protected. 
-# Defaults to `main` -- pattern: main - requiresCodeOwnerReviews: true - requiresStrictStatusChecks: false - requiredStatusCheckContexts: - - 'conventionalcommits.org' - - 'cla/google' - - 'docs' - - 'lint' - - 'unit (3.9)' - - 'unit (3.10)' - - 'unit (3.11)' - - 'unit (3.12)' - - 'unit (3.13)' - - 'cover' -permissionRules: - - team: actools-python - permission: admin - - team: actools - permission: admin - - team: api-bigquery - permission: push - - team: api-bigquery-dataframe - permission: push - - team: yoshi-python - permission: push - - team: python-samples-owners - permission: push - - team: python-samples-reviewers - permission: push \ No newline at end of file diff --git a/packages/db-dtypes/.librarian/state.yaml b/packages/db-dtypes/.librarian/state.yaml new file mode 100644 index 000000000000..d3198f2f8858 --- /dev/null +++ b/packages/db-dtypes/.librarian/state.yaml @@ -0,0 +1,10 @@ +image: us-central1-docker.pkg.dev/cloud-sdk-librarian-prod/images-prod/python-librarian-generator@sha256:39628f6e89c9cad27973b9a39a50f7052bec0435ee58c7027b4fa6b655943e31 +libraries: + - id: db-dtypes + version: 1.4.3 + apis: [] + source_roots: + - . + preserve_regex: [] + remove_regex: [] + tag_format: v{version} diff --git a/packages/db-dtypes/CHANGELOG.md b/packages/db-dtypes/CHANGELOG.md index 159a09534a48..b34a68cabd0b 100644 --- a/packages/db-dtypes/CHANGELOG.md +++ b/packages/db-dtypes/CHANGELOG.md @@ -1,5 +1,9 @@ # Changelog +[PyPI History][1] + +[1]: https://pypi.org/project/db-dtypes/#history + ## [1.4.4](https://github.com/googleapis/python-db-dtypes-pandas/compare/v1.4.3...v1.4.4) (2025-09-08) diff --git a/packages/db-dtypes/owlbot.py b/packages/db-dtypes/owlbot.py deleted file mode 100644 index 860632482078..000000000000 --- a/packages/db-dtypes/owlbot.py +++ /dev/null @@ -1,73 +0,0 @@ -# Copyright 2021 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""This script is used to synthesize generated parts of this library.""" - -import pathlib - -import synthtool as s -from synthtool import gcp -from synthtool.languages import python - -REPO_ROOT = pathlib.Path(__file__).parent.absolute() - -common = gcp.CommonTemplates() - -# ---------------------------------------------------------------------------- -# Add templated files -# ---------------------------------------------------------------------------- -templated_files = common.py_library( - system_test_python_versions=["3.9"], - cov_level=100, - intersphinx_dependencies={ - "pandas": "https://pandas.pydata.org/pandas-docs/stable/" - }, -) -s.move( - templated_files, - excludes=[ - "docs/multiprocessing.rst", - "README.rst", - ".github/workflows/unittest.yml", - ".github/workflows/docs.yml", # to avoid overwriting python version - ".github/workflows/lint.yml", # to avoid overwriting python version - "noxfile.py", - "renovate.json", # to avoid overwriting the ignorePaths list additions: - # ".github/workflows/docs.yml AND lint.yml" specifically - # the version of python referenced in each of those files. - # Currently renovate bot wants to change 3.10 to 3.13. 
- ] -) - -# ---------------------------------------------------------------------------- -# Fixup files -# ---------------------------------------------------------------------------- - -s.replace( - [".coveragerc"], "google/cloud/__init__.py", "db_dtypes/requirements.py", -) - -# ---------------------------------------------------------------------------- -# Samples templates -# ---------------------------------------------------------------------------- - -python.py_samples(skip_readmes=True) - -# ---------------------------------------------------------------------------- -# Final cleanup -# ---------------------------------------------------------------------------- - -s.shell.run(["nox", "-s", "blacken"], hide_output=False) -for noxfile in REPO_ROOT.glob("samples/**/noxfile.py"): - s.shell.run(["nox", "-s", "blacken"], cwd=noxfile.parent, hide_output=False) From f1d490c790b49043108d3d86c4bdcb4b6cae8024 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 11 Nov 2025 14:25:38 -0500 Subject: [PATCH 207/210] chore(librarian): update sha to support librarian 1.0.0 (#376) --- packages/db-dtypes/.librarian/state.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/db-dtypes/.librarian/state.yaml b/packages/db-dtypes/.librarian/state.yaml index d3198f2f8858..c3f1f7c56b99 100644 --- a/packages/db-dtypes/.librarian/state.yaml +++ b/packages/db-dtypes/.librarian/state.yaml @@ -1,4 +1,4 @@ -image: us-central1-docker.pkg.dev/cloud-sdk-librarian-prod/images-prod/python-librarian-generator@sha256:39628f6e89c9cad27973b9a39a50f7052bec0435ee58c7027b4fa6b655943e31 +image: us-central1-docker.pkg.dev/cloud-sdk-librarian-prod/images-prod/python-librarian-generator@sha256:c8612d3fffb3f6a32353b2d1abd16b61e87811866f7ec9d65b59b02eb452a620 libraries: - id: db-dtypes version: 1.4.3 From dee2f65a1444f5fb11e13cf8c7db7bd1b04d26a9 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Wed, 19 Nov 2025 15:46:44 -0800 Subject: [PATCH 208/210] chore(tests): run 
unit tests after merge (#378) This adds an additional trigger to the unit tests to run post-submit as well --- packages/db-dtypes/.github/workflows/unittest.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/packages/db-dtypes/.github/workflows/unittest.yml b/packages/db-dtypes/.github/workflows/unittest.yml index 41694c386562..6e05ae54c77b 100644 --- a/packages/db-dtypes/.github/workflows/unittest.yml +++ b/packages/db-dtypes/.github/workflows/unittest.yml @@ -2,6 +2,9 @@ on: pull_request: branches: - main + push: + branches: + - main name: unittest jobs: unit: From 483757246da2c8de03d6bff5483daeda60a7bf57 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 24 Nov 2025 22:38:28 +0000 Subject: [PATCH 209/210] Trigger owlbot post-processor --- owl-bot-staging/db-dtypes/db-dtypes/db-dtypes.txt | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 owl-bot-staging/db-dtypes/db-dtypes/db-dtypes.txt diff --git a/owl-bot-staging/db-dtypes/db-dtypes/db-dtypes.txt b/owl-bot-staging/db-dtypes/db-dtypes/db-dtypes.txt new file mode 100644 index 000000000000..e69de29bb2d1 From 12e5c3f2d9804f144c7e83eeabe66e1ad0a85c3f Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 24 Nov 2025 22:38:35 +0000 Subject: [PATCH 210/210] build: db-dtypes migration: adjust owlbot-related files --- packages/db-dtypes/.github/CODEOWNERS | 12 - packages/db-dtypes/.github/CONTRIBUTING.md | 28 - .../.github/ISSUE_TEMPLATE/bug_report.md | 43 -- .../.github/ISSUE_TEMPLATE/feature_request.md | 18 - .../.github/ISSUE_TEMPLATE/support_request.md | 7 - .../.github/PULL_REQUEST_TEMPLATE.md | 7 - packages/db-dtypes/.github/auto-label.yaml | 20 - packages/db-dtypes/.github/blunderbuss.yml | 17 - .../db-dtypes/.github/header-checker-lint.yml | 15 - packages/db-dtypes/.github/snippet-bot.yml | 0 .../.github/workflows/compliance.yml | 48 -- packages/db-dtypes/.github/workflows/docs.yml | 38 -- packages/db-dtypes/.github/workflows/lint.yml | 25 - 
.../db-dtypes/.github/workflows/unittest.yml | 120 ----- packages/db-dtypes/.kokoro/build.sh | 60 --- .../db-dtypes/.kokoro/continuous/common.cfg | 27 - .../.kokoro/continuous/continuous.cfg | 1 - .../.kokoro/continuous/prerelease-deps.cfg | 7 - .../db-dtypes/.kokoro/populate-secrets.sh | 43 -- .../db-dtypes/.kokoro/presubmit/common.cfg | 27 - .../.kokoro/presubmit/prerelease-deps.cfg | 7 - .../db-dtypes/.kokoro/presubmit/presubmit.cfg | 1 - .../db-dtypes/.kokoro/samples/lint/common.cfg | 34 -- .../.kokoro/samples/lint/continuous.cfg | 6 - .../.kokoro/samples/lint/periodic.cfg | 6 - .../.kokoro/samples/lint/presubmit.cfg | 6 - .../.kokoro/samples/python3.10/common.cfg | 40 -- .../.kokoro/samples/python3.10/continuous.cfg | 6 - .../samples/python3.10/periodic-head.cfg | 11 - .../.kokoro/samples/python3.10/periodic.cfg | 6 - .../.kokoro/samples/python3.10/presubmit.cfg | 6 - .../.kokoro/samples/python3.11/common.cfg | 40 -- .../.kokoro/samples/python3.11/continuous.cfg | 6 - .../samples/python3.11/periodic-head.cfg | 11 - .../.kokoro/samples/python3.11/periodic.cfg | 6 - .../.kokoro/samples/python3.11/presubmit.cfg | 6 - .../.kokoro/samples/python3.12/common.cfg | 40 -- .../.kokoro/samples/python3.12/continuous.cfg | 6 - .../samples/python3.12/periodic-head.cfg | 11 - .../.kokoro/samples/python3.12/periodic.cfg | 6 - .../.kokoro/samples/python3.12/presubmit.cfg | 6 - .../.kokoro/samples/python3.13/common.cfg | 40 -- .../.kokoro/samples/python3.13/continuous.cfg | 6 - .../samples/python3.13/periodic-head.cfg | 11 - .../.kokoro/samples/python3.13/periodic.cfg | 6 - .../.kokoro/samples/python3.13/presubmit.cfg | 6 - .../.kokoro/samples/python3.7/common.cfg | 40 -- .../.kokoro/samples/python3.7/continuous.cfg | 6 - .../samples/python3.7/periodic-head.cfg | 11 - .../.kokoro/samples/python3.7/periodic.cfg | 6 - .../.kokoro/samples/python3.7/presubmit.cfg | 6 - .../.kokoro/samples/python3.8/common.cfg | 40 -- .../.kokoro/samples/python3.8/continuous.cfg | 6 - 
.../samples/python3.8/periodic-head.cfg | 11 - .../.kokoro/samples/python3.8/periodic.cfg | 6 - .../.kokoro/samples/python3.8/presubmit.cfg | 6 - .../.kokoro/samples/python3.9/common.cfg | 40 -- .../.kokoro/samples/python3.9/continuous.cfg | 6 - .../samples/python3.9/periodic-head.cfg | 11 - .../.kokoro/samples/python3.9/periodic.cfg | 6 - .../.kokoro/samples/python3.9/presubmit.cfg | 6 - .../.kokoro/test-samples-against-head.sh | 26 - .../db-dtypes/.kokoro/test-samples-impl.sh | 103 ---- packages/db-dtypes/.kokoro/test-samples.sh | 44 -- packages/db-dtypes/.kokoro/trampoline.sh | 28 - packages/db-dtypes/.kokoro/trampoline_v2.sh | 487 ------------------ packages/db-dtypes/.trampolinerc | 61 --- packages/db-dtypes/docs/changelog.md | 1 - 68 files changed, 1856 deletions(-) delete mode 100644 packages/db-dtypes/.github/CODEOWNERS delete mode 100644 packages/db-dtypes/.github/CONTRIBUTING.md delete mode 100644 packages/db-dtypes/.github/ISSUE_TEMPLATE/bug_report.md delete mode 100644 packages/db-dtypes/.github/ISSUE_TEMPLATE/feature_request.md delete mode 100644 packages/db-dtypes/.github/ISSUE_TEMPLATE/support_request.md delete mode 100644 packages/db-dtypes/.github/PULL_REQUEST_TEMPLATE.md delete mode 100644 packages/db-dtypes/.github/auto-label.yaml delete mode 100644 packages/db-dtypes/.github/blunderbuss.yml delete mode 100644 packages/db-dtypes/.github/header-checker-lint.yml delete mode 100644 packages/db-dtypes/.github/snippet-bot.yml delete mode 100644 packages/db-dtypes/.github/workflows/compliance.yml delete mode 100644 packages/db-dtypes/.github/workflows/docs.yml delete mode 100644 packages/db-dtypes/.github/workflows/lint.yml delete mode 100644 packages/db-dtypes/.github/workflows/unittest.yml delete mode 100755 packages/db-dtypes/.kokoro/build.sh delete mode 100644 packages/db-dtypes/.kokoro/continuous/common.cfg delete mode 100644 packages/db-dtypes/.kokoro/continuous/continuous.cfg delete mode 100644 
packages/db-dtypes/.kokoro/continuous/prerelease-deps.cfg delete mode 100755 packages/db-dtypes/.kokoro/populate-secrets.sh delete mode 100644 packages/db-dtypes/.kokoro/presubmit/common.cfg delete mode 100644 packages/db-dtypes/.kokoro/presubmit/prerelease-deps.cfg delete mode 100644 packages/db-dtypes/.kokoro/presubmit/presubmit.cfg delete mode 100644 packages/db-dtypes/.kokoro/samples/lint/common.cfg delete mode 100644 packages/db-dtypes/.kokoro/samples/lint/continuous.cfg delete mode 100644 packages/db-dtypes/.kokoro/samples/lint/periodic.cfg delete mode 100644 packages/db-dtypes/.kokoro/samples/lint/presubmit.cfg delete mode 100644 packages/db-dtypes/.kokoro/samples/python3.10/common.cfg delete mode 100644 packages/db-dtypes/.kokoro/samples/python3.10/continuous.cfg delete mode 100644 packages/db-dtypes/.kokoro/samples/python3.10/periodic-head.cfg delete mode 100644 packages/db-dtypes/.kokoro/samples/python3.10/periodic.cfg delete mode 100644 packages/db-dtypes/.kokoro/samples/python3.10/presubmit.cfg delete mode 100644 packages/db-dtypes/.kokoro/samples/python3.11/common.cfg delete mode 100644 packages/db-dtypes/.kokoro/samples/python3.11/continuous.cfg delete mode 100644 packages/db-dtypes/.kokoro/samples/python3.11/periodic-head.cfg delete mode 100644 packages/db-dtypes/.kokoro/samples/python3.11/periodic.cfg delete mode 100644 packages/db-dtypes/.kokoro/samples/python3.11/presubmit.cfg delete mode 100644 packages/db-dtypes/.kokoro/samples/python3.12/common.cfg delete mode 100644 packages/db-dtypes/.kokoro/samples/python3.12/continuous.cfg delete mode 100644 packages/db-dtypes/.kokoro/samples/python3.12/periodic-head.cfg delete mode 100644 packages/db-dtypes/.kokoro/samples/python3.12/periodic.cfg delete mode 100644 packages/db-dtypes/.kokoro/samples/python3.12/presubmit.cfg delete mode 100644 packages/db-dtypes/.kokoro/samples/python3.13/common.cfg delete mode 100644 packages/db-dtypes/.kokoro/samples/python3.13/continuous.cfg delete mode 100644 
packages/db-dtypes/.kokoro/samples/python3.13/periodic-head.cfg delete mode 100644 packages/db-dtypes/.kokoro/samples/python3.13/periodic.cfg delete mode 100644 packages/db-dtypes/.kokoro/samples/python3.13/presubmit.cfg delete mode 100644 packages/db-dtypes/.kokoro/samples/python3.7/common.cfg delete mode 100644 packages/db-dtypes/.kokoro/samples/python3.7/continuous.cfg delete mode 100644 packages/db-dtypes/.kokoro/samples/python3.7/periodic-head.cfg delete mode 100644 packages/db-dtypes/.kokoro/samples/python3.7/periodic.cfg delete mode 100644 packages/db-dtypes/.kokoro/samples/python3.7/presubmit.cfg delete mode 100644 packages/db-dtypes/.kokoro/samples/python3.8/common.cfg delete mode 100644 packages/db-dtypes/.kokoro/samples/python3.8/continuous.cfg delete mode 100644 packages/db-dtypes/.kokoro/samples/python3.8/periodic-head.cfg delete mode 100644 packages/db-dtypes/.kokoro/samples/python3.8/periodic.cfg delete mode 100644 packages/db-dtypes/.kokoro/samples/python3.8/presubmit.cfg delete mode 100644 packages/db-dtypes/.kokoro/samples/python3.9/common.cfg delete mode 100644 packages/db-dtypes/.kokoro/samples/python3.9/continuous.cfg delete mode 100644 packages/db-dtypes/.kokoro/samples/python3.9/periodic-head.cfg delete mode 100644 packages/db-dtypes/.kokoro/samples/python3.9/periodic.cfg delete mode 100644 packages/db-dtypes/.kokoro/samples/python3.9/presubmit.cfg delete mode 100755 packages/db-dtypes/.kokoro/test-samples-against-head.sh delete mode 100755 packages/db-dtypes/.kokoro/test-samples-impl.sh delete mode 100755 packages/db-dtypes/.kokoro/test-samples.sh delete mode 100755 packages/db-dtypes/.kokoro/trampoline.sh delete mode 100755 packages/db-dtypes/.kokoro/trampoline_v2.sh delete mode 100644 packages/db-dtypes/.trampolinerc delete mode 120000 packages/db-dtypes/docs/changelog.md diff --git a/packages/db-dtypes/.github/CODEOWNERS b/packages/db-dtypes/.github/CODEOWNERS deleted file mode 100644 index 193b4363d07e..000000000000 --- 
a/packages/db-dtypes/.github/CODEOWNERS +++ /dev/null @@ -1,12 +0,0 @@ -# Code owners file. -# This file controls who is tagged for review for any given pull request. -# -# For syntax help see: -# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax -# Note: This file is autogenerated. To make changes to the codeowner team, please update .repo-metadata.json. - -# @googleapis/yoshi-python @googleapis/api-bigquery are the default owners for changes in this repo -* @googleapis/yoshi-python @googleapis/api-bigquery - -# @googleapis/python-samples-reviewers @googleapis/api-bigquery are the default owners for samples changes -/samples/ @googleapis/python-samples-reviewers @googleapis/api-bigquery diff --git a/packages/db-dtypes/.github/CONTRIBUTING.md b/packages/db-dtypes/.github/CONTRIBUTING.md deleted file mode 100644 index 939e5341e74d..000000000000 --- a/packages/db-dtypes/.github/CONTRIBUTING.md +++ /dev/null @@ -1,28 +0,0 @@ -# How to Contribute - -We'd love to accept your patches and contributions to this project. There are -just a few small guidelines you need to follow. - -## Contributor License Agreement - -Contributions to this project must be accompanied by a Contributor License -Agreement. You (or your employer) retain the copyright to your contribution; -this simply gives us permission to use and redistribute your contributions as -part of the project. Head over to to see -your current agreements on file or to sign a new one. - -You generally only need to submit a CLA once, so if you've already submitted one -(even if it was for a different project), you probably don't need to do it -again. - -## Code reviews - -All submissions, including submissions by project members, require review. We -use GitHub pull requests for this purpose. Consult -[GitHub Help](https://help.github.com/articles/about-pull-requests/) for more -information on using pull requests. 
- -## Community Guidelines - -This project follows [Google's Open Source Community -Guidelines](https://opensource.google.com/conduct/). diff --git a/packages/db-dtypes/.github/ISSUE_TEMPLATE/bug_report.md b/packages/db-dtypes/.github/ISSUE_TEMPLATE/bug_report.md deleted file mode 100644 index 9cf101f8b340..000000000000 --- a/packages/db-dtypes/.github/ISSUE_TEMPLATE/bug_report.md +++ /dev/null @@ -1,43 +0,0 @@ ---- -name: Bug report -about: Create a report to help us improve - ---- - -Thanks for stopping by to let us know something could be better! - -**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. - -Please run down the following list and make sure you've tried the usual "quick fixes": - - - Search the issues already opened: https://github.com/googleapis/python-db-dtypes-pandas/issues - - Search StackOverflow: https://stackoverflow.com/questions/tagged/google-cloud-platform+python - -If you are still having issues, please be sure to include as much information as possible: - -#### Environment details - - - OS type and version: - - Python version: `python --version` - - pip version: `pip --version` - - `db-dtypes` version: `pip show db-dtypes` - -#### Steps to reproduce - - 1. ? - 2. ? - -#### Code example - -```python -# example -``` - -#### Stack trace -``` -# example -``` - -Making sure to follow these steps will guarantee the quickest resolution possible. - -Thanks! diff --git a/packages/db-dtypes/.github/ISSUE_TEMPLATE/feature_request.md b/packages/db-dtypes/.github/ISSUE_TEMPLATE/feature_request.md deleted file mode 100644 index 6365857f33c6..000000000000 --- a/packages/db-dtypes/.github/ISSUE_TEMPLATE/feature_request.md +++ /dev/null @@ -1,18 +0,0 @@ ---- -name: Feature request -about: Suggest an idea for this library - ---- - -Thanks for stopping by to let us know something could be better! 
- -**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. - - **Is your feature request related to a problem? Please describe.** -A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] - **Describe the solution you'd like** -A clear and concise description of what you want to happen. - **Describe alternatives you've considered** -A clear and concise description of any alternative solutions or features you've considered. - **Additional context** -Add any other context or screenshots about the feature request here. diff --git a/packages/db-dtypes/.github/ISSUE_TEMPLATE/support_request.md b/packages/db-dtypes/.github/ISSUE_TEMPLATE/support_request.md deleted file mode 100644 index 995869032125..000000000000 --- a/packages/db-dtypes/.github/ISSUE_TEMPLATE/support_request.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -name: Support request -about: If you have a support contract with Google, please create an issue in the Google Cloud Support console. - ---- - -**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. diff --git a/packages/db-dtypes/.github/PULL_REQUEST_TEMPLATE.md b/packages/db-dtypes/.github/PULL_REQUEST_TEMPLATE.md deleted file mode 100644 index 710f27b75a8c..000000000000 --- a/packages/db-dtypes/.github/PULL_REQUEST_TEMPLATE.md +++ /dev/null @@ -1,7 +0,0 @@ -Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly: -- [ ] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/python-db-dtypes-pandas/issues/new/choose) before writing your code! 
That way we can discuss the change, evaluate designs, and agree on the general idea -- [ ] Ensure the tests and linter pass -- [ ] Code coverage does not decrease (if any source code was changed) -- [ ] Appropriate docs were updated (if necessary) - -Fixes # 🦕 diff --git a/packages/db-dtypes/.github/auto-label.yaml b/packages/db-dtypes/.github/auto-label.yaml deleted file mode 100644 index 21786a4eb085..000000000000 --- a/packages/db-dtypes/.github/auto-label.yaml +++ /dev/null @@ -1,20 +0,0 @@ -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -requestsize: - enabled: true - -path: - pullrequest: true - paths: - samples: "samples" diff --git a/packages/db-dtypes/.github/blunderbuss.yml b/packages/db-dtypes/.github/blunderbuss.yml deleted file mode 100644 index 5b7383dc7665..000000000000 --- a/packages/db-dtypes/.github/blunderbuss.yml +++ /dev/null @@ -1,17 +0,0 @@ -# Blunderbuss config -# -# This file controls who is assigned for pull requests and issues. -# Note: This file is autogenerated. To make changes to the assignee -# team, please update `codeowner_team` in `.repo-metadata.json`. 
-assign_issues: - - googleapis/api-bigquery - -assign_issues_by: - - labels: - - "samples" - to: - - googleapis/python-samples-reviewers - - googleapis/api-bigquery - -assign_prs: - - googleapis/api-bigquery diff --git a/packages/db-dtypes/.github/header-checker-lint.yml b/packages/db-dtypes/.github/header-checker-lint.yml deleted file mode 100644 index 6fe78aa7987a..000000000000 --- a/packages/db-dtypes/.github/header-checker-lint.yml +++ /dev/null @@ -1,15 +0,0 @@ -{"allowedCopyrightHolders": ["Google LLC"], - "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"], - "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt", "**/__init__.py", "samples/**/constraints.txt", "samples/**/constraints-test.txt"], - "sourceFileExtensions": [ - "ts", - "js", - "java", - "sh", - "Dockerfile", - "yaml", - "py", - "html", - "txt" - ] -} \ No newline at end of file diff --git a/packages/db-dtypes/.github/snippet-bot.yml b/packages/db-dtypes/.github/snippet-bot.yml deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/packages/db-dtypes/.github/workflows/compliance.yml b/packages/db-dtypes/.github/workflows/compliance.yml deleted file mode 100644 index 39b9ae7a98ca..000000000000 --- a/packages/db-dtypes/.github/workflows/compliance.yml +++ /dev/null @@ -1,48 +0,0 @@ -on: - pull_request: - branches: - - main -name: unittest -jobs: - compliance: - runs-on: ubuntu-latest - strategy: - matrix: - python: ['3.13'] - steps: - - name: Checkout - uses: actions/checkout@v5 - - name: Setup Python - uses: actions/setup-python@v6 - with: - python-version: ${{ matrix.python }} - - name: Install nox - run: | - python -m pip install --upgrade setuptools pip wheel - python -m pip install nox - - name: Run compliance tests - env: - COVERAGE_FILE: .coverage-compliance-${{ matrix.python }} - run: | - nox -s compliance - compliance-prerelease: - runs-on: ubuntu-latest - strategy: - matrix: - python: ['3.13'] - steps: - - name: Checkout - uses: actions/checkout@v5 - - name: 
Setup Python - uses: actions/setup-python@v6 - with: - python-version: ${{ matrix.python }} - - name: Install nox - run: | - python -m pip install --upgrade setuptools pip wheel - python -m pip install nox - - name: Run compliance prerelease tests - env: - COVERAGE_FILE: .coverage-compliance-prerelease-${{ matrix.python }} - run: | - nox -s compliance_prerelease diff --git a/packages/db-dtypes/.github/workflows/docs.yml b/packages/db-dtypes/.github/workflows/docs.yml deleted file mode 100644 index 2833fe98fff0..000000000000 --- a/packages/db-dtypes/.github/workflows/docs.yml +++ /dev/null @@ -1,38 +0,0 @@ -on: - pull_request: - branches: - - main -name: docs -jobs: - docs: - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Setup Python - uses: actions/setup-python@v5 - with: - python-version: "3.10" - - name: Install nox - run: | - python -m pip install --upgrade setuptools pip wheel - python -m pip install nox - - name: Run docs - run: | - nox -s docs - docfx: - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Setup Python - uses: actions/setup-python@v5 - with: - python-version: "3.10" - - name: Install nox - run: | - python -m pip install --upgrade setuptools pip wheel - python -m pip install nox - - name: Run docfx - run: | - nox -s docfx diff --git a/packages/db-dtypes/.github/workflows/lint.yml b/packages/db-dtypes/.github/workflows/lint.yml deleted file mode 100644 index 1051da0bdda4..000000000000 --- a/packages/db-dtypes/.github/workflows/lint.yml +++ /dev/null @@ -1,25 +0,0 @@ -on: - pull_request: - branches: - - main -name: lint -jobs: - lint: - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Setup Python - uses: actions/setup-python@v5 - with: - python-version: "3.10" - - name: Install nox - run: | - python -m pip install --upgrade setuptools pip wheel - python -m pip install nox - - name: Run lint - run: | - nox -s lint - - name: Run 
lint_setup_py - run: | - nox -s lint_setup_py diff --git a/packages/db-dtypes/.github/workflows/unittest.yml b/packages/db-dtypes/.github/workflows/unittest.yml deleted file mode 100644 index 6e05ae54c77b..000000000000 --- a/packages/db-dtypes/.github/workflows/unittest.yml +++ /dev/null @@ -1,120 +0,0 @@ -on: - pull_request: - branches: - - main - push: - branches: - - main -name: unittest -jobs: - unit: - # Use `ubuntu-latest` runner. - runs-on: ubuntu-latest - strategy: - matrix: - python: ['3.9', '3.10', '3.11', '3.12', '3.13'] - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Setup Python - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python }} - - name: Install nox - run: | - python -m pip install --upgrade setuptools pip wheel - python -m pip install nox - - name: Run unit tests - env: - COVERAGE_FILE: .coverage-${{ matrix.python }} - run: | - nox -s unit-${{ matrix.python }} - - name: Upload coverage results - uses: actions/upload-artifact@v4 - with: - name: coverage-artifact-${{ matrix.python }} - path: .coverage-${{ matrix.python }} - include-hidden-files: true - - unit-prerelease: - runs-on: ubuntu-latest - strategy: - matrix: - python: ['3.13'] - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Setup Python - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python }} - - name: Install nox - run: | - python -m pip install --upgrade setuptools pip wheel - python -m pip install nox - - name: Run unit tests - env: - COVERAGE_FILE: .coverage-prerelease-${{ matrix.python }} - run: | - nox -s unit_prerelease - - name: Upload coverage results - uses: actions/upload-artifact@v4 - with: - name: coverage-artifact-prerelease-${{ matrix.python }} - path: .coverage-prerelease-${{ matrix.python }} - include-hidden-files: true - - compliance: - runs-on: ubuntu-latest - strategy: - matrix: - python: ['3.13'] - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Setup Python - uses: 
actions/setup-python@v5 - with: - python-version: ${{ matrix.python }} - - name: Install nox - run: | - python -m pip install --upgrade setuptools pip wheel - python -m pip install nox - - name: Run compliance tests - env: - COVERAGE_FILE: .coverage-compliance-${{ matrix.python }} - run: | - nox -s compliance-${{ matrix.python }} - - name: Upload coverage results - uses: actions/upload-artifact@v4 - with: - name: coverage-artifact-compliance-${{ matrix.python }} - path: .coverage-compliance-${{ matrix.python }} - include-hidden-files: true - - cover: - runs-on: ubuntu-latest - needs: - - unit - - unit-prerelease - - compliance - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Setup Python - uses: actions/setup-python@v5 - with: - python-version: "3.9" - - name: Install coverage - run: | - python -m pip install --upgrade setuptools pip wheel - python -m pip install coverage - - name: Download coverage results - uses: actions/download-artifact@v4 - with: - path: .coverage-results/ - - name: Report coverage results - run: | - find .coverage-results -type f -name '*.zip' -exec unzip {} \; - coverage combine .coverage-results/**/.coverage* - coverage report --show-missing --fail-under=100 diff --git a/packages/db-dtypes/.kokoro/build.sh b/packages/db-dtypes/.kokoro/build.sh deleted file mode 100755 index d41b45aa1dd0..000000000000 --- a/packages/db-dtypes/.kokoro/build.sh +++ /dev/null @@ -1,60 +0,0 @@ -#!/bin/bash -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -CURRENT_DIR=$(dirname "${BASH_SOURCE[0]}") - -if [[ -z "${PROJECT_ROOT:-}" ]]; then - PROJECT_ROOT=$(realpath "${CURRENT_DIR}/..") -fi - -pushd "${PROJECT_ROOT}" - -# Disable buffering, so that the logs stream through. -export PYTHONUNBUFFERED=1 - -# Debug: show build environment -env | grep KOKORO - -# Setup service account credentials. -if [[ -f "${KOKORO_GFILE_DIR}/service-account.json" ]] -then - export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json -fi - -# Setup project id. -if [[ -f "${KOKORO_GFILE_DIR}/project-id.json" ]] -then - export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") -fi - -# If this is a continuous build, send the test log to the FlakyBot. -# See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. -if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then - cleanup() { - chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot - $KOKORO_GFILE_DIR/linux_amd64/flakybot - } - trap cleanup EXIT HUP -fi - -# If NOX_SESSION is set, it only runs the specified session, -# otherwise run all the sessions. -if [[ -n "${NOX_SESSION:-}" ]]; then - python3 -m nox -s ${NOX_SESSION:-} -else - python3 -m nox -fi diff --git a/packages/db-dtypes/.kokoro/continuous/common.cfg b/packages/db-dtypes/.kokoro/continuous/common.cfg deleted file mode 100644 index b40f00c767c6..000000000000 --- a/packages/db-dtypes/.kokoro/continuous/common.cfg +++ /dev/null @@ -1,27 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Download resources for system tests (service account key, etc.) 
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" - -# Use the trampoline script to run in docker. -build_file: "python-db-dtypes-pandas/.kokoro/trampoline.sh" - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-db-dtypes-pandas/.kokoro/build.sh" -} diff --git a/packages/db-dtypes/.kokoro/continuous/continuous.cfg b/packages/db-dtypes/.kokoro/continuous/continuous.cfg deleted file mode 100644 index 8f43917d92fe..000000000000 --- a/packages/db-dtypes/.kokoro/continuous/continuous.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/continuous/prerelease-deps.cfg b/packages/db-dtypes/.kokoro/continuous/prerelease-deps.cfg deleted file mode 100644 index 3595fb43f5c0..000000000000 --- a/packages/db-dtypes/.kokoro/continuous/prerelease-deps.cfg +++ /dev/null @@ -1,7 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Only run this nox session. -env_vars: { - key: "NOX_SESSION" - value: "prerelease_deps" -} diff --git a/packages/db-dtypes/.kokoro/populate-secrets.sh b/packages/db-dtypes/.kokoro/populate-secrets.sh deleted file mode 100755 index c435402f473e..000000000000 --- a/packages/db-dtypes/.kokoro/populate-secrets.sh +++ /dev/null @@ -1,43 +0,0 @@ -#!/bin/bash -# Copyright 2024 Google LLC. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -function now { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n' ;} -function msg { println "$*" >&2 ;} -function println { printf '%s\n' "$(now) $*" ;} - - -# Populates requested secrets set in SECRET_MANAGER_KEYS from service account: -# kokoro-trampoline@cloud-devrel-kokoro-resources.iam.gserviceaccount.com -SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager" -msg "Creating folder on disk for secrets: ${SECRET_LOCATION}" -mkdir -p ${SECRET_LOCATION} -for key in $(echo ${SECRET_MANAGER_KEYS} | sed "s/,/ /g") -do - msg "Retrieving secret ${key}" - docker run --entrypoint=gcloud \ - --volume=${KOKORO_GFILE_DIR}:${KOKORO_GFILE_DIR} \ - gcr.io/google.com/cloudsdktool/cloud-sdk \ - secrets versions access latest \ - --project cloud-devrel-kokoro-resources \ - --secret ${key} > \ - "${SECRET_LOCATION}/${key}" - if [[ $? == 0 ]]; then - msg "Secret written to ${SECRET_LOCATION}/${key}" - else - msg "Error retrieving secret ${key}" - fi -done diff --git a/packages/db-dtypes/.kokoro/presubmit/common.cfg b/packages/db-dtypes/.kokoro/presubmit/common.cfg deleted file mode 100644 index b40f00c767c6..000000000000 --- a/packages/db-dtypes/.kokoro/presubmit/common.cfg +++ /dev/null @@ -1,27 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Download resources for system tests (service account key, etc.) -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" - -# Use the trampoline script to run in docker. -build_file: "python-db-dtypes-pandas/.kokoro/trampoline.sh" - -# Configure the docker image for kokoro-trampoline. 
-env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-db-dtypes-pandas/.kokoro/build.sh" -} diff --git a/packages/db-dtypes/.kokoro/presubmit/prerelease-deps.cfg b/packages/db-dtypes/.kokoro/presubmit/prerelease-deps.cfg deleted file mode 100644 index 3595fb43f5c0..000000000000 --- a/packages/db-dtypes/.kokoro/presubmit/prerelease-deps.cfg +++ /dev/null @@ -1,7 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Only run this nox session. -env_vars: { - key: "NOX_SESSION" - value: "prerelease_deps" -} diff --git a/packages/db-dtypes/.kokoro/presubmit/presubmit.cfg b/packages/db-dtypes/.kokoro/presubmit/presubmit.cfg deleted file mode 100644 index 8f43917d92fe..000000000000 --- a/packages/db-dtypes/.kokoro/presubmit/presubmit.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/lint/common.cfg b/packages/db-dtypes/.kokoro/samples/lint/common.cfg deleted file mode 100644 index ff39ec39f579..000000000000 --- a/packages/db-dtypes/.kokoro/samples/lint/common.cfg +++ /dev/null @@ -1,34 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "lint" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-db-dtypes-pandas/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. 
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "python-db-dtypes-pandas/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/lint/continuous.cfg b/packages/db-dtypes/.kokoro/samples/lint/continuous.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/db-dtypes/.kokoro/samples/lint/continuous.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/lint/periodic.cfg b/packages/db-dtypes/.kokoro/samples/lint/periodic.cfg deleted file mode 100644 index 50fec9649732..000000000000 --- a/packages/db-dtypes/.kokoro/samples/lint/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/lint/presubmit.cfg b/packages/db-dtypes/.kokoro/samples/lint/presubmit.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/db-dtypes/.kokoro/samples/lint/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/python3.10/common.cfg b/packages/db-dtypes/.kokoro/samples/python3.10/common.cfg deleted file mode 100644 index bcf97b918aac..000000000000 --- a/packages/db-dtypes/.kokoro/samples/python3.10/common.cfg +++ /dev/null @@ -1,40 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: 
"py-3.10" -} - -# Declare build specific Cloud project. -env_vars: { - key: "BUILD_SPECIFIC_GCLOUD_PROJECT" - value: "python-docs-samples-tests-310" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-db-dtypes-pandas/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "python-db-dtypes-pandas/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/python3.10/continuous.cfg b/packages/db-dtypes/.kokoro/samples/python3.10/continuous.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/db-dtypes/.kokoro/samples/python3.10/continuous.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/python3.10/periodic-head.cfg b/packages/db-dtypes/.kokoro/samples/python3.10/periodic-head.cfg deleted file mode 100644 index ee3d56408db9..000000000000 --- a/packages/db-dtypes/.kokoro/samples/python3.10/periodic-head.cfg +++ /dev/null @@ -1,11 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-db-dtypes-pandas/.kokoro/test-samples-against-head.sh" -} diff --git a/packages/db-dtypes/.kokoro/samples/python3.10/periodic.cfg b/packages/db-dtypes/.kokoro/samples/python3.10/periodic.cfg deleted file mode 100644 index 71cd1e597e38..000000000000 --- 
a/packages/db-dtypes/.kokoro/samples/python3.10/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} diff --git a/packages/db-dtypes/.kokoro/samples/python3.10/presubmit.cfg b/packages/db-dtypes/.kokoro/samples/python3.10/presubmit.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/db-dtypes/.kokoro/samples/python3.10/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/python3.11/common.cfg b/packages/db-dtypes/.kokoro/samples/python3.11/common.cfg deleted file mode 100644 index 6f8962a49b8a..000000000000 --- a/packages/db-dtypes/.kokoro/samples/python3.11/common.cfg +++ /dev/null @@ -1,40 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "py-3.11" -} - -# Declare build specific Cloud project. -env_vars: { - key: "BUILD_SPECIFIC_GCLOUD_PROJECT" - value: "python-docs-samples-tests-311" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-db-dtypes-pandas/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. 
-build_file: "python-db-dtypes-pandas/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/python3.11/continuous.cfg b/packages/db-dtypes/.kokoro/samples/python3.11/continuous.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/db-dtypes/.kokoro/samples/python3.11/continuous.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/python3.11/periodic-head.cfg b/packages/db-dtypes/.kokoro/samples/python3.11/periodic-head.cfg deleted file mode 100644 index ee3d56408db9..000000000000 --- a/packages/db-dtypes/.kokoro/samples/python3.11/periodic-head.cfg +++ /dev/null @@ -1,11 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-db-dtypes-pandas/.kokoro/test-samples-against-head.sh" -} diff --git a/packages/db-dtypes/.kokoro/samples/python3.11/periodic.cfg b/packages/db-dtypes/.kokoro/samples/python3.11/periodic.cfg deleted file mode 100644 index 71cd1e597e38..000000000000 --- a/packages/db-dtypes/.kokoro/samples/python3.11/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} diff --git a/packages/db-dtypes/.kokoro/samples/python3.11/presubmit.cfg b/packages/db-dtypes/.kokoro/samples/python3.11/presubmit.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/db-dtypes/.kokoro/samples/python3.11/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git 
a/packages/db-dtypes/.kokoro/samples/python3.12/common.cfg b/packages/db-dtypes/.kokoro/samples/python3.12/common.cfg deleted file mode 100644 index ad97dcc3f040..000000000000 --- a/packages/db-dtypes/.kokoro/samples/python3.12/common.cfg +++ /dev/null @@ -1,40 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "py-3.12" -} - -# Declare build specific Cloud project. -env_vars: { - key: "BUILD_SPECIFIC_GCLOUD_PROJECT" - value: "python-docs-samples-tests-312" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-db-dtypes-pandas/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. 
-build_file: "python-db-dtypes-pandas/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/python3.12/continuous.cfg b/packages/db-dtypes/.kokoro/samples/python3.12/continuous.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/db-dtypes/.kokoro/samples/python3.12/continuous.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/python3.12/periodic-head.cfg b/packages/db-dtypes/.kokoro/samples/python3.12/periodic-head.cfg deleted file mode 100644 index ee3d56408db9..000000000000 --- a/packages/db-dtypes/.kokoro/samples/python3.12/periodic-head.cfg +++ /dev/null @@ -1,11 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-db-dtypes-pandas/.kokoro/test-samples-against-head.sh" -} diff --git a/packages/db-dtypes/.kokoro/samples/python3.12/periodic.cfg b/packages/db-dtypes/.kokoro/samples/python3.12/periodic.cfg deleted file mode 100644 index 71cd1e597e38..000000000000 --- a/packages/db-dtypes/.kokoro/samples/python3.12/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} diff --git a/packages/db-dtypes/.kokoro/samples/python3.12/presubmit.cfg b/packages/db-dtypes/.kokoro/samples/python3.12/presubmit.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/db-dtypes/.kokoro/samples/python3.12/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git 
a/packages/db-dtypes/.kokoro/samples/python3.13/common.cfg b/packages/db-dtypes/.kokoro/samples/python3.13/common.cfg deleted file mode 100644 index d81f6eed8552..000000000000 --- a/packages/db-dtypes/.kokoro/samples/python3.13/common.cfg +++ /dev/null @@ -1,40 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "py-3.13" -} - -# Declare build specific Cloud project. -env_vars: { - key: "BUILD_SPECIFIC_GCLOUD_PROJECT" - value: "python-docs-samples-tests-313" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-db-dtypes-pandas/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. 
-build_file: "python-db-dtypes-pandas/.kokoro/trampoline_v2.sh" diff --git a/packages/db-dtypes/.kokoro/samples/python3.13/continuous.cfg b/packages/db-dtypes/.kokoro/samples/python3.13/continuous.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/db-dtypes/.kokoro/samples/python3.13/continuous.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/python3.13/periodic-head.cfg b/packages/db-dtypes/.kokoro/samples/python3.13/periodic-head.cfg deleted file mode 100644 index ee3d56408db9..000000000000 --- a/packages/db-dtypes/.kokoro/samples/python3.13/periodic-head.cfg +++ /dev/null @@ -1,11 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-db-dtypes-pandas/.kokoro/test-samples-against-head.sh" -} diff --git a/packages/db-dtypes/.kokoro/samples/python3.13/periodic.cfg b/packages/db-dtypes/.kokoro/samples/python3.13/periodic.cfg deleted file mode 100644 index 71cd1e597e38..000000000000 --- a/packages/db-dtypes/.kokoro/samples/python3.13/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} diff --git a/packages/db-dtypes/.kokoro/samples/python3.13/presubmit.cfg b/packages/db-dtypes/.kokoro/samples/python3.13/presubmit.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/db-dtypes/.kokoro/samples/python3.13/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/python3.7/common.cfg 
b/packages/db-dtypes/.kokoro/samples/python3.7/common.cfg deleted file mode 100644 index cf54accebdf6..000000000000 --- a/packages/db-dtypes/.kokoro/samples/python3.7/common.cfg +++ /dev/null @@ -1,40 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "py-3.7" -} - -# Declare build specific Cloud project. -env_vars: { - key: "BUILD_SPECIFIC_GCLOUD_PROJECT" - value: "python-docs-samples-tests-py37" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-db-dtypes-pandas/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. 
-build_file: "python-db-dtypes-pandas/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/python3.7/continuous.cfg b/packages/db-dtypes/.kokoro/samples/python3.7/continuous.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/db-dtypes/.kokoro/samples/python3.7/continuous.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/python3.7/periodic-head.cfg b/packages/db-dtypes/.kokoro/samples/python3.7/periodic-head.cfg deleted file mode 100644 index ee3d56408db9..000000000000 --- a/packages/db-dtypes/.kokoro/samples/python3.7/periodic-head.cfg +++ /dev/null @@ -1,11 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-db-dtypes-pandas/.kokoro/test-samples-against-head.sh" -} diff --git a/packages/db-dtypes/.kokoro/samples/python3.7/periodic.cfg b/packages/db-dtypes/.kokoro/samples/python3.7/periodic.cfg deleted file mode 100644 index 71cd1e597e38..000000000000 --- a/packages/db-dtypes/.kokoro/samples/python3.7/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} diff --git a/packages/db-dtypes/.kokoro/samples/python3.7/presubmit.cfg b/packages/db-dtypes/.kokoro/samples/python3.7/presubmit.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/db-dtypes/.kokoro/samples/python3.7/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git 
a/packages/db-dtypes/.kokoro/samples/python3.8/common.cfg b/packages/db-dtypes/.kokoro/samples/python3.8/common.cfg deleted file mode 100644 index a8500a8a6f2d..000000000000 --- a/packages/db-dtypes/.kokoro/samples/python3.8/common.cfg +++ /dev/null @@ -1,40 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "py-3.8" -} - -# Declare build specific Cloud project. -env_vars: { - key: "BUILD_SPECIFIC_GCLOUD_PROJECT" - value: "python-docs-samples-tests-py38" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-db-dtypes-pandas/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. 
-build_file: "python-db-dtypes-pandas/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/python3.8/continuous.cfg b/packages/db-dtypes/.kokoro/samples/python3.8/continuous.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/db-dtypes/.kokoro/samples/python3.8/continuous.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/python3.8/periodic-head.cfg b/packages/db-dtypes/.kokoro/samples/python3.8/periodic-head.cfg deleted file mode 100644 index ee3d56408db9..000000000000 --- a/packages/db-dtypes/.kokoro/samples/python3.8/periodic-head.cfg +++ /dev/null @@ -1,11 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-db-dtypes-pandas/.kokoro/test-samples-against-head.sh" -} diff --git a/packages/db-dtypes/.kokoro/samples/python3.8/periodic.cfg b/packages/db-dtypes/.kokoro/samples/python3.8/periodic.cfg deleted file mode 100644 index 71cd1e597e38..000000000000 --- a/packages/db-dtypes/.kokoro/samples/python3.8/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} diff --git a/packages/db-dtypes/.kokoro/samples/python3.8/presubmit.cfg b/packages/db-dtypes/.kokoro/samples/python3.8/presubmit.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/db-dtypes/.kokoro/samples/python3.8/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git 
a/packages/db-dtypes/.kokoro/samples/python3.9/common.cfg b/packages/db-dtypes/.kokoro/samples/python3.9/common.cfg deleted file mode 100644 index 13262b7633e0..000000000000 --- a/packages/db-dtypes/.kokoro/samples/python3.9/common.cfg +++ /dev/null @@ -1,40 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "py-3.9" -} - -# Declare build specific Cloud project. -env_vars: { - key: "BUILD_SPECIFIC_GCLOUD_PROJECT" - value: "python-docs-samples-tests-py39" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-db-dtypes-pandas/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. 
-build_file: "python-db-dtypes-pandas/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/python3.9/continuous.cfg b/packages/db-dtypes/.kokoro/samples/python3.9/continuous.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/db-dtypes/.kokoro/samples/python3.9/continuous.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/samples/python3.9/periodic-head.cfg b/packages/db-dtypes/.kokoro/samples/python3.9/periodic-head.cfg deleted file mode 100644 index ee3d56408db9..000000000000 --- a/packages/db-dtypes/.kokoro/samples/python3.9/periodic-head.cfg +++ /dev/null @@ -1,11 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-db-dtypes-pandas/.kokoro/test-samples-against-head.sh" -} diff --git a/packages/db-dtypes/.kokoro/samples/python3.9/periodic.cfg b/packages/db-dtypes/.kokoro/samples/python3.9/periodic.cfg deleted file mode 100644 index 71cd1e597e38..000000000000 --- a/packages/db-dtypes/.kokoro/samples/python3.9/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} diff --git a/packages/db-dtypes/.kokoro/samples/python3.9/presubmit.cfg b/packages/db-dtypes/.kokoro/samples/python3.9/presubmit.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/db-dtypes/.kokoro/samples/python3.9/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git 
a/packages/db-dtypes/.kokoro/test-samples-against-head.sh b/packages/db-dtypes/.kokoro/test-samples-against-head.sh deleted file mode 100755 index e9d8bd79a644..000000000000 --- a/packages/db-dtypes/.kokoro/test-samples-against-head.sh +++ /dev/null @@ -1,26 +0,0 @@ -#!/bin/bash -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# A customized test runner for samples. -# -# For periodic builds, you can specify this file for testing against head. - -# `-e` enables the script to automatically fail when a command fails -# `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero -set -eo pipefail -# Enables `**` to include files nested inside sub-folders -shopt -s globstar - -exec .kokoro/test-samples-impl.sh diff --git a/packages/db-dtypes/.kokoro/test-samples-impl.sh b/packages/db-dtypes/.kokoro/test-samples-impl.sh deleted file mode 100755 index 53e365bc4e79..000000000000 --- a/packages/db-dtypes/.kokoro/test-samples-impl.sh +++ /dev/null @@ -1,103 +0,0 @@ -#!/bin/bash -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -# `-e` enables the script to automatically fail when a command fails -# `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero -set -eo pipefail -# Enables `**` to include files nested inside sub-folders -shopt -s globstar - -# Exit early if samples don't exist -if ! find samples -name 'requirements.txt' | grep -q .; then - echo "No tests run. './samples/**/requirements.txt' not found" - exit 0 -fi - -# Disable buffering, so that the logs stream through. -export PYTHONUNBUFFERED=1 - -# Debug: show build environment -env | grep KOKORO - -# Install nox -# `virtualenv==20.26.6` is added for Python 3.7 compatibility -python3.9 -m pip install --upgrade --quiet nox virtualenv==20.26.6 - -# Use secrets acessor service account to get secrets -if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then - gcloud auth activate-service-account \ - --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \ - --project="cloud-devrel-kokoro-resources" -fi - -# This script will create 3 files: -# - testing/test-env.sh -# - testing/service-account.json -# - testing/client-secrets.json -./scripts/decrypt-secrets.sh - -source ./testing/test-env.sh -export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json - -# For cloud-run session, we activate the service account for gcloud sdk. 
-gcloud auth activate-service-account \ - --key-file "${GOOGLE_APPLICATION_CREDENTIALS}" - -export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json - -echo -e "\n******************** TESTING PROJECTS ********************" - -# Switch to 'fail at end' to allow all tests to complete before exiting. -set +e -# Use RTN to return a non-zero value if the test fails. -RTN=0 -ROOT=$(pwd) -# Find all requirements.txt in the samples directory (may break on whitespace). -for file in samples/**/requirements.txt; do - cd "$ROOT" - # Navigate to the project folder. - file=$(dirname "$file") - cd "$file" - - echo "------------------------------------------------------------" - echo "- testing $file" - echo "------------------------------------------------------------" - - # Use nox to execute the tests for the project. - python3.9 -m nox -s "$RUN_TESTS_SESSION" - EXIT=$? - - # If this is a periodic build, send the test log to the FlakyBot. - # See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. - if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then - chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot - $KOKORO_GFILE_DIR/linux_amd64/flakybot - fi - - if [[ $EXIT -ne 0 ]]; then - RTN=1 - echo -e "\n Testing failed: Nox returned a non-zero exit code. \n" - else - echo -e "\n Testing completed.\n" - fi - -done -cd "$ROOT" - -# Workaround for Kokoro permissions issue: delete secrets -rm testing/{test-env.sh,client-secrets.json,service-account.json} - -exit "$RTN" diff --git a/packages/db-dtypes/.kokoro/test-samples.sh b/packages/db-dtypes/.kokoro/test-samples.sh deleted file mode 100755 index 7933d820149a..000000000000 --- a/packages/db-dtypes/.kokoro/test-samples.sh +++ /dev/null @@ -1,44 +0,0 @@ -#!/bin/bash -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# The default test runner for samples. -# -# For periodic builds, we rewinds the repo to the latest release, and -# run test-samples-impl.sh. - -# `-e` enables the script to automatically fail when a command fails -# `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero -set -eo pipefail -# Enables `**` to include files nested inside sub-folders -shopt -s globstar - -# Run periodic samples tests at latest release -if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then - # preserving the test runner implementation. - cp .kokoro/test-samples-impl.sh "${TMPDIR}/test-samples-impl.sh" - echo "--- IMPORTANT IMPORTANT IMPORTANT ---" - echo "Now we rewind the repo back to the latest release..." - LATEST_RELEASE=$(git describe --abbrev=0 --tags) - git checkout $LATEST_RELEASE - echo "The current head is: " - echo $(git rev-parse --verify HEAD) - echo "--- IMPORTANT IMPORTANT IMPORTANT ---" - # move back the test runner implementation if there's no file. - if [ ! -f .kokoro/test-samples-impl.sh ]; then - cp "${TMPDIR}/test-samples-impl.sh" .kokoro/test-samples-impl.sh - fi -fi - -exec .kokoro/test-samples-impl.sh diff --git a/packages/db-dtypes/.kokoro/trampoline.sh b/packages/db-dtypes/.kokoro/trampoline.sh deleted file mode 100755 index 48f79699706e..000000000000 --- a/packages/db-dtypes/.kokoro/trampoline.sh +++ /dev/null @@ -1,28 +0,0 @@ -#!/bin/bash -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -# Always run the cleanup script, regardless of the success of bouncing into -# the container. -function cleanup() { - chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh - ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh - echo "cleanup"; -} -trap cleanup EXIT - -$(dirname $0)/populate-secrets.sh # Secret Manager secrets. -python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" \ No newline at end of file diff --git a/packages/db-dtypes/.kokoro/trampoline_v2.sh b/packages/db-dtypes/.kokoro/trampoline_v2.sh deleted file mode 100755 index 35fa529231dc..000000000000 --- a/packages/db-dtypes/.kokoro/trampoline_v2.sh +++ /dev/null @@ -1,487 +0,0 @@ -#!/usr/bin/env bash -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# trampoline_v2.sh -# -# This script does 3 things. -# -# 1. Prepare the Docker image for the test -# 2. Run the Docker with appropriate flags to run the test -# 3. Upload the newly built Docker image -# -# in a way that is somewhat compatible with trampoline_v1. 
-# -# To run this script, first download few files from gcs to /dev/shm. -# (/dev/shm is passed into the container as KOKORO_GFILE_DIR). -# -# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/secrets_viewer_service_account.json /dev/shm -# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/automl_secrets.txt /dev/shm -# -# Then run the script. -# .kokoro/trampoline_v2.sh -# -# These environment variables are required: -# TRAMPOLINE_IMAGE: The docker image to use. -# TRAMPOLINE_DOCKERFILE: The location of the Dockerfile. -# -# You can optionally change these environment variables: -# TRAMPOLINE_IMAGE_UPLOAD: -# (true|false): Whether to upload the Docker image after the -# successful builds. -# TRAMPOLINE_BUILD_FILE: The script to run in the docker container. -# TRAMPOLINE_WORKSPACE: The workspace path in the docker container. -# Defaults to /workspace. -# Potentially there are some repo specific envvars in .trampolinerc in -# the project root. - - -set -euo pipefail - -TRAMPOLINE_VERSION="2.0.5" - -if command -v tput >/dev/null && [[ -n "${TERM:-}" ]]; then - readonly IO_COLOR_RED="$(tput setaf 1)" - readonly IO_COLOR_GREEN="$(tput setaf 2)" - readonly IO_COLOR_YELLOW="$(tput setaf 3)" - readonly IO_COLOR_RESET="$(tput sgr0)" -else - readonly IO_COLOR_RED="" - readonly IO_COLOR_GREEN="" - readonly IO_COLOR_YELLOW="" - readonly IO_COLOR_RESET="" -fi - -function function_exists { - [ $(LC_ALL=C type -t $1)"" == "function" ] -} - -# Logs a message using the given color. The first argument must be one -# of the IO_COLOR_* variables defined above, such as -# "${IO_COLOR_YELLOW}". The remaining arguments will be logged in the -# given color. The log message will also have an RFC-3339 timestamp -# prepended (in UTC). You can disable the color output by setting -# TERM=vt100. 
-function log_impl() { - local color="$1" - shift - local timestamp="$(date -u "+%Y-%m-%dT%H:%M:%SZ")" - echo "================================================================" - echo "${color}${timestamp}:" "$@" "${IO_COLOR_RESET}" - echo "================================================================" -} - -# Logs the given message with normal coloring and a timestamp. -function log() { - log_impl "${IO_COLOR_RESET}" "$@" -} - -# Logs the given message in green with a timestamp. -function log_green() { - log_impl "${IO_COLOR_GREEN}" "$@" -} - -# Logs the given message in yellow with a timestamp. -function log_yellow() { - log_impl "${IO_COLOR_YELLOW}" "$@" -} - -# Logs the given message in red with a timestamp. -function log_red() { - log_impl "${IO_COLOR_RED}" "$@" -} - -readonly tmpdir=$(mktemp -d -t ci-XXXXXXXX) -readonly tmphome="${tmpdir}/h" -mkdir -p "${tmphome}" - -function cleanup() { - rm -rf "${tmpdir}" -} -trap cleanup EXIT - -RUNNING_IN_CI="${RUNNING_IN_CI:-false}" - -# The workspace in the container, defaults to /workspace. -TRAMPOLINE_WORKSPACE="${TRAMPOLINE_WORKSPACE:-/workspace}" - -pass_down_envvars=( - # TRAMPOLINE_V2 variables. - # Tells scripts whether they are running as part of CI or not. - "RUNNING_IN_CI" - # Indicates which CI system we're in. - "TRAMPOLINE_CI" - # Indicates the version of the script. - "TRAMPOLINE_VERSION" -) - -log_yellow "Building with Trampoline ${TRAMPOLINE_VERSION}" - -# Detect which CI systems we're in. If we're in any of the CI systems -# we support, `RUNNING_IN_CI` will be true and `TRAMPOLINE_CI` will be -# the name of the CI system. Both envvars will be passing down to the -# container for telling which CI system we're in. -if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then - # descriptive env var for indicating it's on CI. - RUNNING_IN_CI="true" - TRAMPOLINE_CI="kokoro" - if [[ "${TRAMPOLINE_USE_LEGACY_SERVICE_ACCOUNT:-}" == "true" ]]; then - if [[ ! 
-f "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" ]]; then - log_red "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json does not exist. Did you forget to mount cloud-devrel-kokoro-resources/trampoline? Aborting." - exit 1 - fi - # This service account will be activated later. - TRAMPOLINE_SERVICE_ACCOUNT="${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" - else - if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then - gcloud auth list - fi - log_yellow "Configuring Container Registry access" - gcloud auth configure-docker --quiet - fi - pass_down_envvars+=( - # KOKORO dynamic variables. - "KOKORO_BUILD_NUMBER" - "KOKORO_BUILD_ID" - "KOKORO_JOB_NAME" - "KOKORO_GIT_COMMIT" - "KOKORO_GITHUB_COMMIT" - "KOKORO_GITHUB_PULL_REQUEST_NUMBER" - "KOKORO_GITHUB_PULL_REQUEST_COMMIT" - # For FlakyBot - "KOKORO_GITHUB_COMMIT_URL" - "KOKORO_GITHUB_PULL_REQUEST_URL" - ) -elif [[ "${TRAVIS:-}" == "true" ]]; then - RUNNING_IN_CI="true" - TRAMPOLINE_CI="travis" - pass_down_envvars+=( - "TRAVIS_BRANCH" - "TRAVIS_BUILD_ID" - "TRAVIS_BUILD_NUMBER" - "TRAVIS_BUILD_WEB_URL" - "TRAVIS_COMMIT" - "TRAVIS_COMMIT_MESSAGE" - "TRAVIS_COMMIT_RANGE" - "TRAVIS_JOB_NAME" - "TRAVIS_JOB_NUMBER" - "TRAVIS_JOB_WEB_URL" - "TRAVIS_PULL_REQUEST" - "TRAVIS_PULL_REQUEST_BRANCH" - "TRAVIS_PULL_REQUEST_SHA" - "TRAVIS_PULL_REQUEST_SLUG" - "TRAVIS_REPO_SLUG" - "TRAVIS_SECURE_ENV_VARS" - "TRAVIS_TAG" - ) -elif [[ -n "${GITHUB_RUN_ID:-}" ]]; then - RUNNING_IN_CI="true" - TRAMPOLINE_CI="github-workflow" - pass_down_envvars+=( - "GITHUB_WORKFLOW" - "GITHUB_RUN_ID" - "GITHUB_RUN_NUMBER" - "GITHUB_ACTION" - "GITHUB_ACTIONS" - "GITHUB_ACTOR" - "GITHUB_REPOSITORY" - "GITHUB_EVENT_NAME" - "GITHUB_EVENT_PATH" - "GITHUB_SHA" - "GITHUB_REF" - "GITHUB_HEAD_REF" - "GITHUB_BASE_REF" - ) -elif [[ "${CIRCLECI:-}" == "true" ]]; then - RUNNING_IN_CI="true" - TRAMPOLINE_CI="circleci" - pass_down_envvars+=( - "CIRCLE_BRANCH" - "CIRCLE_BUILD_NUM" - "CIRCLE_BUILD_URL" - "CIRCLE_COMPARE_URL" - "CIRCLE_JOB" 
- "CIRCLE_NODE_INDEX" - "CIRCLE_NODE_TOTAL" - "CIRCLE_PREVIOUS_BUILD_NUM" - "CIRCLE_PROJECT_REPONAME" - "CIRCLE_PROJECT_USERNAME" - "CIRCLE_REPOSITORY_URL" - "CIRCLE_SHA1" - "CIRCLE_STAGE" - "CIRCLE_USERNAME" - "CIRCLE_WORKFLOW_ID" - "CIRCLE_WORKFLOW_JOB_ID" - "CIRCLE_WORKFLOW_UPSTREAM_JOB_IDS" - "CIRCLE_WORKFLOW_WORKSPACE_ID" - ) -fi - -# Configure the service account for pulling the docker image. -function repo_root() { - local dir="$1" - while [[ ! -d "${dir}/.git" ]]; do - dir="$(dirname "$dir")" - done - echo "${dir}" -} - -# Detect the project root. In CI builds, we assume the script is in -# the git tree and traverse from there, otherwise, traverse from `pwd` -# to find `.git` directory. -if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then - PROGRAM_PATH="$(realpath "$0")" - PROGRAM_DIR="$(dirname "${PROGRAM_PATH}")" - PROJECT_ROOT="$(repo_root "${PROGRAM_DIR}")" -else - PROJECT_ROOT="$(repo_root $(pwd))" -fi - -log_yellow "Changing to the project root: ${PROJECT_ROOT}." -cd "${PROJECT_ROOT}" - -# To support relative path for `TRAMPOLINE_SERVICE_ACCOUNT`, we need -# to use this environment variable in `PROJECT_ROOT`. -if [[ -n "${TRAMPOLINE_SERVICE_ACCOUNT:-}" ]]; then - - mkdir -p "${tmpdir}/gcloud" - gcloud_config_dir="${tmpdir}/gcloud" - - log_yellow "Using isolated gcloud config: ${gcloud_config_dir}." - export CLOUDSDK_CONFIG="${gcloud_config_dir}" - - log_yellow "Using ${TRAMPOLINE_SERVICE_ACCOUNT} for authentication." - gcloud auth activate-service-account \ - --key-file "${TRAMPOLINE_SERVICE_ACCOUNT}" - log_yellow "Configuring Container Registry access" - gcloud auth configure-docker --quiet -fi - -required_envvars=( - # The basic trampoline configurations. - "TRAMPOLINE_IMAGE" - "TRAMPOLINE_BUILD_FILE" -) - -if [[ -f "${PROJECT_ROOT}/.trampolinerc" ]]; then - source "${PROJECT_ROOT}/.trampolinerc" -fi - -log_yellow "Checking environment variables." -for e in "${required_envvars[@]}" -do - if [[ -z "${!e:-}" ]]; then - log "Missing ${e} env var. Aborting." 
- exit 1 - fi -done - -# We want to support legacy style TRAMPOLINE_BUILD_FILE used with V1 -# script: e.g. "github/repo-name/.kokoro/run_tests.sh" -TRAMPOLINE_BUILD_FILE="${TRAMPOLINE_BUILD_FILE#github/*/}" -log_yellow "Using TRAMPOLINE_BUILD_FILE: ${TRAMPOLINE_BUILD_FILE}" - -# ignore error on docker operations and test execution -set +e - -log_yellow "Preparing Docker image." -# We only download the docker image in CI builds. -if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then - # Download the docker image specified by `TRAMPOLINE_IMAGE` - - # We may want to add --max-concurrent-downloads flag. - - log_yellow "Start pulling the Docker image: ${TRAMPOLINE_IMAGE}." - if docker pull "${TRAMPOLINE_IMAGE}"; then - log_green "Finished pulling the Docker image: ${TRAMPOLINE_IMAGE}." - has_image="true" - else - log_red "Failed pulling the Docker image: ${TRAMPOLINE_IMAGE}." - has_image="false" - fi -else - # For local run, check if we have the image. - if docker images "${TRAMPOLINE_IMAGE}:latest" | grep "${TRAMPOLINE_IMAGE}"; then - has_image="true" - else - has_image="false" - fi -fi - - -# The default user for a Docker container has uid 0 (root). To avoid -# creating root-owned files in the build directory we tell docker to -# use the current user ID. -user_uid="$(id -u)" -user_gid="$(id -g)" -user_name="$(id -un)" - -# To allow docker in docker, we add the user to the docker group in -# the host os. -docker_gid=$(cut -d: -f3 < <(getent group docker)) - -update_cache="false" -if [[ "${TRAMPOLINE_DOCKERFILE:-none}" != "none" ]]; then - # Build the Docker image from the source. - context_dir=$(dirname "${TRAMPOLINE_DOCKERFILE}") - docker_build_flags=( - "-f" "${TRAMPOLINE_DOCKERFILE}" - "-t" "${TRAMPOLINE_IMAGE}" - "--build-arg" "UID=${user_uid}" - "--build-arg" "USERNAME=${user_name}" - ) - if [[ "${has_image}" == "true" ]]; then - docker_build_flags+=("--cache-from" "${TRAMPOLINE_IMAGE}") - fi - - log_yellow "Start building the docker image." 
- if [[ "${TRAMPOLINE_VERBOSE:-false}" == "true" ]]; then - echo "docker build" "${docker_build_flags[@]}" "${context_dir}" - fi - - # ON CI systems, we want to suppress docker build logs, only - # output the logs when it fails. - if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then - if docker build "${docker_build_flags[@]}" "${context_dir}" \ - > "${tmpdir}/docker_build.log" 2>&1; then - if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then - cat "${tmpdir}/docker_build.log" - fi - - log_green "Finished building the docker image." - update_cache="true" - else - log_red "Failed to build the Docker image, aborting." - log_yellow "Dumping the build logs:" - cat "${tmpdir}/docker_build.log" - exit 1 - fi - else - if docker build "${docker_build_flags[@]}" "${context_dir}"; then - log_green "Finished building the docker image." - update_cache="true" - else - log_red "Failed to build the Docker image, aborting." - exit 1 - fi - fi -else - if [[ "${has_image}" != "true" ]]; then - log_red "We do not have ${TRAMPOLINE_IMAGE} locally, aborting." - exit 1 - fi -fi - -# We use an array for the flags so they are easier to document. -docker_flags=( - # Remove the container after it exists. - "--rm" - - # Use the host network. - "--network=host" - - # Run in priviledged mode. We are not using docker for sandboxing or - # isolation, just for packaging our dev tools. - "--privileged" - - # Run the docker script with the user id. Because the docker image gets to - # write in ${PWD} you typically want this to be your user id. - # To allow docker in docker, we need to use docker gid on the host. - "--user" "${user_uid}:${docker_gid}" - - # Pass down the USER. - "--env" "USER=${user_name}" - - # Mount the project directory inside the Docker container. - "--volume" "${PROJECT_ROOT}:${TRAMPOLINE_WORKSPACE}" - "--workdir" "${TRAMPOLINE_WORKSPACE}" - "--env" "PROJECT_ROOT=${TRAMPOLINE_WORKSPACE}" - - # Mount the temporary home directory. 
- "--volume" "${tmphome}:/h" - "--env" "HOME=/h" - - # Allow docker in docker. - "--volume" "/var/run/docker.sock:/var/run/docker.sock" - - # Mount the /tmp so that docker in docker can mount the files - # there correctly. - "--volume" "/tmp:/tmp" - # Pass down the KOKORO_GFILE_DIR and KOKORO_KEYSTORE_DIR - # TODO(tmatsuo): This part is not portable. - "--env" "TRAMPOLINE_SECRET_DIR=/secrets" - "--volume" "${KOKORO_GFILE_DIR:-/dev/shm}:/secrets/gfile" - "--env" "KOKORO_GFILE_DIR=/secrets/gfile" - "--volume" "${KOKORO_KEYSTORE_DIR:-/dev/shm}:/secrets/keystore" - "--env" "KOKORO_KEYSTORE_DIR=/secrets/keystore" -) - -# Add an option for nicer output if the build gets a tty. -if [[ -t 0 ]]; then - docker_flags+=("-it") -fi - -# Passing down env vars -for e in "${pass_down_envvars[@]}" -do - if [[ -n "${!e:-}" ]]; then - docker_flags+=("--env" "${e}=${!e}") - fi -done - -# If arguments are given, all arguments will become the commands run -# in the container, otherwise run TRAMPOLINE_BUILD_FILE. -if [[ $# -ge 1 ]]; then - log_yellow "Running the given commands '" "${@:1}" "' in the container." - readonly commands=("${@:1}") - if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then - echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}" - fi - docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}" -else - log_yellow "Running the tests in a Docker container." - docker_flags+=("--entrypoint=${TRAMPOLINE_BUILD_FILE}") - if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then - echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" - fi - docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" -fi - - -test_retval=$? - -if [[ ${test_retval} -eq 0 ]]; then - log_green "Build finished with ${test_retval}" -else - log_red "Build finished with ${test_retval}" -fi - -# Only upload it when the test passes. 
-if [[ "${update_cache}" == "true" ]] && \ - [[ $test_retval == 0 ]] && \ - [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]]; then - log_yellow "Uploading the Docker image." - if docker push "${TRAMPOLINE_IMAGE}"; then - log_green "Finished uploading the Docker image." - else - log_red "Failed uploading the Docker image." - fi - # Call trampoline_after_upload_hook if it's defined. - if function_exists trampoline_after_upload_hook; then - trampoline_after_upload_hook - fi - -fi - -exit "${test_retval}" diff --git a/packages/db-dtypes/.trampolinerc b/packages/db-dtypes/.trampolinerc deleted file mode 100644 index 0080152373d5..000000000000 --- a/packages/db-dtypes/.trampolinerc +++ /dev/null @@ -1,61 +0,0 @@ -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Add required env vars here. -required_envvars+=( -) - -# Add env vars which are passed down into the container here. -pass_down_envvars+=( - "NOX_SESSION" - ############### - # Docs builds - ############### - "STAGING_BUCKET" - "V2_STAGING_BUCKET" - ################## - # Samples builds - ################## - "INSTALL_LIBRARY_FROM_SOURCE" - "RUN_TESTS_SESSION" - "BUILD_SPECIFIC_GCLOUD_PROJECT" - # Target directories. - "RUN_TESTS_DIRS" - # The nox session to run. - "RUN_TESTS_SESSION" -) - -# Prevent unintentional override on the default image. 
-if [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]] && \ - [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then - echo "Please set TRAMPOLINE_IMAGE if you want to upload the Docker image." - exit 1 -fi - -# Define the default value if it makes sense. -if [[ -z "${TRAMPOLINE_IMAGE_UPLOAD:-}" ]]; then - TRAMPOLINE_IMAGE_UPLOAD="" -fi - -if [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then - TRAMPOLINE_IMAGE="" -fi - -if [[ -z "${TRAMPOLINE_DOCKERFILE:-}" ]]; then - TRAMPOLINE_DOCKERFILE="" -fi - -if [[ -z "${TRAMPOLINE_BUILD_FILE:-}" ]]; then - TRAMPOLINE_BUILD_FILE="" -fi diff --git a/packages/db-dtypes/docs/changelog.md b/packages/db-dtypes/docs/changelog.md deleted file mode 120000 index 04c99a55caae..000000000000 --- a/packages/db-dtypes/docs/changelog.md +++ /dev/null @@ -1 +0,0 @@ -../CHANGELOG.md \ No newline at end of file