diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index ff71365e69..f90140cbfa 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -226,12 +226,14 @@ We support: - `Python 3.11`_ - `Python 3.12`_ - `Python 3.13`_ +- `Python 3.14`_ .. _Python 3.9: https://docs.python.org/3.9/ .. _Python 3.10: https://docs.python.org/3.10/ .. _Python 3.11: https://docs.python.org/3.11/ .. _Python 3.12: https://docs.python.org/3.12/ .. _Python 3.13: https://docs.python.org/3.13/ +.. _Python 3.14: https://docs.python.org/3.14/ Supported versions can be found in our ``noxfile.py`` `config`_. diff --git a/README.rst b/README.rst index 20f1413155..05a77e7072 100644 --- a/README.rst +++ b/README.rst @@ -24,6 +24,8 @@ Vertex AI SDK for Python .. _Client Library Documentation: https://cloud.google.com/python/docs/reference/aiplatform/latest .. _Product Documentation: https://cloud.google.com/vertex-ai/docs +.. Trivial comment for CI baseline + Installation ~~~~~~~~~~~~ diff --git a/google/cloud/aiplatform/utils/source_utils.py b/google/cloud/aiplatform/utils/source_utils.py index dc3c14a759..16c100b96f 100644 --- a/google/cloud/aiplatform/utils/source_utils.py +++ b/google/cloud/aiplatform/utils/source_utils.py @@ -183,6 +183,22 @@ def make_package(self, package_directory: str) -> str: # Copy script as module of python package. shutil.copy(self.script_path, script_out_path) + # Ensure setuptools is installed + install_setuptools_cmd = [_get_python_executable(), "-m", "pip", "install", "setuptools"] + p_install = subprocess.Popen( + args=install_setuptools_cmd, + cwd=trainer_root_path, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + output, error = p_install.communicate() + + if p_install.returncode != 0: + raise RuntimeError( + "Failed to install setuptools, code %d\n%s\n%s" + % (p_install.returncode, output.decode(), error.decode()) + ) + # Run setup.py to create the source distribution. 
setup_cmd = [ _get_python_executable() diff --git a/noxfile.py b/noxfile.py index 2ea6d71713..2a42546e32 100644 --- a/noxfile.py +++ b/noxfile.py @@ -32,7 +32,7 @@ ISORT_VERSION = "isort==5.10.1" LINT_PATHS = ["docs", "google", "vertexai", "tests", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.10" +DEFAULT_PYTHON_VERSION = "3.14" DOCS_DEPENDENCIES = ( "sphinx==5.0.2", @@ -69,6 +69,7 @@ "pytest-asyncio", # Preventing: py.test: error: unrecognized arguments: -n=auto --dist=loadscope "pytest-xdist", + "pyyaml>=5.3.1,<7", ] UNIT_TEST_EXTERNAL_DEPENDENCIES = [] UNIT_TEST_LOCAL_DEPENDENCIES = [] @@ -78,7 +79,7 @@ ] UNIT_TEST_EXTRAS_BY_PYTHON = {} -SYSTEM_TEST_PYTHON_VERSIONS = ["3.10"] +SYSTEM_TEST_PYTHON_VERSIONS = ["3.14"] SYSTEM_TEST_STANDARD_DEPENDENCIES = [ "mock", "pytest", @@ -121,6 +122,7 @@ def lint(session): serious code quality issues. """ session.install(FLAKE8_VERSION, BLACK_VERSION) + session.run("uv", "pip", "list", "--format=freeze") session.run( "black", "--check", @@ -133,7 +135,8 @@ @nox.session(python=DEFAULT_PYTHON_VERSION, venv_backend="virtualenv") def blacken(session): """Run black. Format code to uniform standard.""" session.install(BLACK_VERSION) + session.run("python", "-m", "pip", "freeze") session.run( "black", *LINT_PATHS, @@ -147,6 +149,7 @@ def format(session): to format code to uniform standard. """ session.install(BLACK_VERSION, ISORT_VERSION) + session.run("uv", "pip", "list", "--format=freeze") # Use the --fss option to sort imports using strict alphabetical order. 
# See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections session.run( @@ -164,6 +167,7 @@ def format(session): def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" session.install("docutils", "pygments", "setuptools") + session.run("uv", "pip", "list", "--format=freeze") session.run("python", "setup.py", "check", "--restructuredtext", "--strict") @@ -184,12 +188,6 @@ def install_unittest_dependencies(session, *constraints): - if UNIT_TEST_EXTRAS_BY_PYTHON: - extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) - elif UNIT_TEST_EXTRAS: - extras = UNIT_TEST_EXTRAS - else: - extras = [] - - if extras: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, UNIT_TEST_EXTRAS) + if extras: session.install("-e", f".[{','.join(extras)}]", *constraints) else: session.install("-e", ".", *constraints) @@ -202,6 +200,7 @@ def default(session): CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) install_unittest_dependencies(session, "-c", constraints_path) + session.run("uv", "pip", "list", "--format=freeze") # Run py.test against the unit tests. session.run( @@ -249,6 +248,7 @@ def unit_genai_minimal_dependencies(session): standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES session.install(*standard_deps) session.install("-e", ".") + session.run("uv", "pip", "list", "--format=freeze") # Run py.test against the unit tests. session.run( @@ -277,6 +277,7 @@ def unit_ray(session, ray): # Install ray extras session.install("-e", ".[ray_testing]", "-c", constraints_path) + session.run("uv", "pip", "list", "--format=freeze") # Run py.test against the unit tests. session.run( @@ -303,6 +304,7 @@ def unit_langchain(session): # Install langchain extras session.install("-e", ".[langchain_testing]", "-c", constraints_path) + session.run("uv", "pip", "list", "--format=freeze") # Run py.test against the unit tests. 
session.run( @@ -329,6 +331,7 @@ def unit_ag2(session): # Install ag2 extras session.install("-e", ".[ag2_testing]", "-c", constraints_path) + session.run("uv", "pip", "list", "--format=freeze") # Run py.test against the unit tests. session.run( @@ -357,6 +360,7 @@ def unit_llama_index(session): # Install llama_index extras session.install("-e", ".[llama_index_testing]", "-c", constraints_path) + session.run("uv", "pip", "list", "--format=freeze") # Run py.test against the unit tests. session.run( @@ -426,6 +430,7 @@ def system(session): session.skip("System tests were not found") install_systemtest_dependencies(session, "-c", constraints_path) + session.run("uv", "pip", "list", "--format=freeze") # Run py.test against the system tests. if system_test_exists: @@ -454,6 +459,7 @@ def cover(session): test runs (not system test runs), and then erases coverage data. """ session.install("coverage", "pytest-cov") + session.run("uv", "pip", "list", "--format=freeze") session.run("coverage", "report", "--show-missing", "--fail-under=85") session.run("coverage", "erase") @@ -468,6 +474,7 @@ def docs(session): *DOCS_DEPENDENCIES, "google-cloud-aiplatform[prediction]", ) + session.run("python", "-m", "pip", "freeze") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( @@ -492,6 +499,7 @@ def docfx(session): *DOCFX_DEPENDENCIES, "google-cloud-aiplatform[prediction]", ) + session.run("python", "-m", "pip", "freeze") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( @@ -525,6 +533,7 @@ def gemini_docs(session): session.install("-e", ".") session.install(*DOCS_DEPENDENCIES) + session.run("python", "-m", "pip", "freeze") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( @@ -546,6 +555,7 @@ def gemini_docfx(session): session.install("-e", ".") session.install(*DOCFX_DEPENDENCIES) + session.run("python", "-m", "pip", "freeze") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) 
session.run( @@ -631,6 +641,7 @@ def prerelease_deps(session): "google-auth", ] session.install(*other_deps) + session.run("uv", "pip", "list", "--format=freeze") # Print out prerelease package versions session.run( diff --git a/owlbot.py b/owlbot.py index 569c043bfa..b5a55ca428 100644 --- a/owlbot.py +++ b/owlbot.py @@ -98,7 +98,7 @@ templated_files = common.py_library( cov_level=98, - system_test_python_versions=["3.9"], + system_test_python_versions=["3.14"], unit_test_python_versions=["3.9", "3.10", "3.11", "3.12", "3.13", "3.14"], unit_test_extras=["testing"], system_test_extras=["testing"], diff --git a/samples/model-builder/noxfile.py b/samples/model-builder/noxfile.py index a169b5b5b4..69bcaf56de 100644 --- a/samples/model-builder/noxfile.py +++ b/samples/model-builder/noxfile.py @@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13", "3.14"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py index a169b5b5b4..69bcaf56de 100644 --- a/samples/snippets/noxfile.py +++ b/samples/snippets/noxfile.py @@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13", "3.14"] # Any default versions that should be ignored. 
IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] diff --git a/setup.py b/setup.py index bd3a7fe965..ccee0e1c4a 100644 --- a/setup.py +++ b/setup.py @@ -118,7 +118,8 @@ "google-cloud-bigquery-storage", "google-cloud-bigquery", "pandas >= 1.0.0", - "pyarrow >= 6.0.1", + "pyarrow >= 6.0.1, <= 14.0.2; python_version < '3.11'", + "pyarrow >= 15.0.0; python_version >= '3.11'", "immutabledict", ] @@ -278,6 +279,8 @@ "requests-toolbelt <= 1.0.0", "immutabledict", "xgboost", + "setuptools", + "pyyaml>=5.3.1,<7", ] ) diff --git a/testing/constraints-ray-2.33.0.txt b/testing/constraints-ray-2.33.0.txt index 10bc7c96f9..095993ca47 100644 --- a/testing/constraints-ray-2.33.0.txt +++ b/testing/constraints-ray-2.33.0.txt @@ -1,13 +1,16 @@ ray==2.33.0 +numpy<2.0.0 # Ray 2.33.0 not compatible with NumPy 2.x # Below constraints are inherited from constraints-3.10.txt +setuptools<70.0.0 google-api-core proto-plus==1.22.3 protobuf mock==4.0.2 -google-cloud-storage==3.0.0 # Updated to v3.x, backward compatible with v2.x via wrapper +google-cloud-storage==2.2.1 # Increased for kfp 2.0 compatibility packaging==24.1 # Increased to unbreak canonicalize_version error (b/377774673) grpcio-testing==1.34.0 mlflow==1.30.1 # Pinned to speed up installation pytest-xdist==3.3.1 # Pinned to unbreak unit tests IPython # Added to test supernova rich html buttons - +pandas==2.1.4 # Pandas must be <2.2.0 to be compatible with ray 2.33.0 +scikit-learn<1.6.0 # Breaking Ray 2.33.0 \ No newline at end of file diff --git a/testing/constraints-ray-2.42.0.txt b/testing/constraints-ray-2.42.0.txt index 5844dca74f..a97fc6af46 100644 --- a/testing/constraints-ray-2.42.0.txt +++ b/testing/constraints-ray-2.42.0.txt @@ -1,13 +1,16 @@ ray==2.42.0 +numpy<2.0.0 # Ray 2.42.0 not compatible with NumPy 2.x on Py 3.10 # Below constraints are inherited from constraints-3.10.txt +setuptools<70.0.0 google-api-core proto-plus==1.22.3 protobuf mock==4.0.2 -google-cloud-storage==3.0.0 # Updated to v3.x, backward compatible 
with v2.x via wrapper +google-cloud-storage==2.2.1 # Increased for kfp 2.0 compatibility packaging==24.1 # Increased to unbreak canonicalize_version error (b/377774673) grpcio-testing==1.34.0 mlflow==1.30.1 # Pinned to speed up installation pytest-xdist==3.3.1 # Pinned to unbreak unit tests IPython # Added to test supernova rich html buttons - +pandas==2.1.4 # Pandas must be <2.2.0 to be compatible with ray 2.42.0 +scikit-learn<1.6.0 # Breaking Ray 2.42.0 \ No newline at end of file diff --git a/testing/constraints-ray-2.9.3.txt b/testing/constraints-ray-2.9.3.txt index c4c1ea816c..03023a2cba 100644 --- a/testing/constraints-ray-2.9.3.txt +++ b/testing/constraints-ray-2.9.3.txt @@ -1,4 +1,5 @@ ray==2.9.3 +numpy<2.0.0 # Ray 2.9.3 not compatible with NumPy 2.x # Below constraints are inherited from constraints-3.10.txt setuptools<70.0.0 google-api-core diff --git a/tests/unit/vertex_ray/test_vertex_ray_client.py b/tests/unit/vertex_ray/test_vertex_ray_client.py index a896b101b4..9bd7d868d3 100644 --- a/tests/unit/vertex_ray/test_vertex_ray_client.py +++ b/tests/unit/vertex_ray/test_vertex_ray_client.py @@ -24,14 +24,25 @@ # -*- coding: utf-8 -*- -_TEST_CLIENT_CONTEXT = ray.client_builder.ClientContext( - dashboard_url=tc.ClusterConstants.TEST_VERTEX_RAY_DASHBOARD_ADDRESS, - python_version="MOCK_PYTHON_VERSION", - ray_version="MOCK_RAY_VERSION", - ray_commit="MOCK_RAY_COMMIT", - _num_clients=1, - _context_to_restore=None, -) +try: + _TEST_CLIENT_CONTEXT = ray.client_builder.ClientContext( + dashboard_url=tc.ClusterConstants.TEST_VERTEX_RAY_DASHBOARD_ADDRESS, + python_version="MOCK_PYTHON_VERSION", + ray_version="MOCK_RAY_VERSION", + ray_commit="MOCK_RAY_COMMIT", + protocol_version=0, + _num_clients=1, + _context_to_restore=None, + ) +except TypeError: + _TEST_CLIENT_CONTEXT = ray.client_builder.ClientContext( + dashboard_url=tc.ClusterConstants.TEST_VERTEX_RAY_DASHBOARD_ADDRESS, + python_version="MOCK_PYTHON_VERSION", + ray_version="MOCK_RAY_VERSION", + 
ray_commit="MOCK_RAY_COMMIT", + _num_clients=1, + _context_to_restore=None, + ) _TEST_VERTEX_RAY_CLIENT_CONTEXT = vertex_ray.client_builder._VertexRayClientContext( persistent_resource_id="MOCK_PERSISTENT_RESOURCE_ID",