diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index 6fba04481..bca179d78 100644
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-328f83d70fc606f9d544f7e1d81a04d2473f6dec
\ No newline at end of file
+64d5b2c102315eef89e9b3b0b3fc960049030d77
\ No newline at end of file
diff --git a/docs/account/iam/workspace_assignment.rst b/docs/account/iam/workspace_assignment.rst
index 2a8043172..ca78b86df 100644
--- a/docs/account/iam/workspace_assignment.rst
+++ b/docs/account/iam/workspace_assignment.rst
@@ -43,9 +43,9 @@

             a = AccountClient()

-            workspace_id = os.environ["DUMMY_WORKSPACE_ID"]
+            workspace_id = os.environ["TEST_WORKSPACE_ID"]

-            all = a.workspace_assignment.list(list=workspace_id)
+            all = a.workspace_assignment.list(workspace_id=workspace_id)

         Get the permission assignments for the specified Databricks account and Databricks workspace.

diff --git a/docs/account/provisioning/credentials.rst b/docs/account/provisioning/credentials.rst
index d63648d58..b71c1707e 100644
--- a/docs/account/provisioning/credentials.rst
+++ b/docs/account/provisioning/credentials.rst
@@ -24,15 +24,15 @@

             a = AccountClient()

-            role = a.credentials.create(
+            creds = a.credentials.create(
                 credentials_name=f"sdk-{time.time_ns()}",
                 aws_credentials=provisioning.CreateCredentialAwsCredentials(
-                    sts_role=provisioning.CreateCredentialStsRole(role_arn=os.environ["TEST_CROSSACCOUNT_ARN"])
+                    sts_role=provisioning.CreateCredentialStsRole(role_arn=os.environ["TEST_LOGDELIVERY_ARN"])
                 ),
             )

             # cleanup
-            a.credentials.delete(credentials_id=role.credentials_id)
+            a.credentials.delete(credentials_id=creds.credentials_id)

         Creates a Databricks credential configuration that represents cloud cross-account credentials for a
         specified account. Databricks uses this to set up network infrastructure properly to host Databricks
diff --git a/docs/account/provisioning/storage.rst b/docs/account/provisioning/storage.rst
index 25ee5abaa..41a04deb3 100644
--- a/docs/account/provisioning/storage.rst
+++ b/docs/account/provisioning/storage.rst
@@ -23,10 +23,13 @@

             a = AccountClient()

-            storage = a.storage.create(
+            bucket = a.storage.create(
                 storage_configuration_name=f"sdk-{time.time_ns()}",
                 root_bucket_info=provisioning.RootBucketInfo(bucket_name=f"sdk-{time.time_ns()}"),
             )
+
+            # cleanup
+            a.storage.delete(storage_configuration_id=bucket.storage_configuration_id)

         Creates a Databricks storage configuration for an account.
diff --git a/docs/workspace/catalog/catalogs.rst b/docs/workspace/catalog/catalogs.rst
index 0b4d11aaa..258f994d3 100644
--- a/docs/workspace/catalog/catalogs.rst
+++ b/docs/workspace/catalog/catalogs.rst
@@ -24,10 +24,10 @@

             w = WorkspaceClient()

-            new_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}")
+            created_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}")

             # cleanup
-            w.catalogs.delete(name=new_catalog.name, force=True)
+            w.catalogs.delete(name=created_catalog.name, force=True)

         Creates a new catalog instance in the parent metastore if the caller is a metastore admin or has the
         **CREATE_CATALOG** privilege.
@@ -155,12 +155,13 @@
             import time

             from databricks.sdk import WorkspaceClient
+            from databricks.sdk.service import catalog

             w = WorkspaceClient()

             created = w.catalogs.create(name=f"sdk-{time.time_ns()}")

-            _ = w.catalogs.update(name=created.name, comment="updated")
+            _ = w.catalogs.update(name=created.name, isolation_mode=catalog.CatalogIsolationMode.ISOLATED)

             # cleanup
             w.catalogs.delete(name=created.name, force=True)
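The catalog examples above all share the same create-then-clean-up shape, and the regenerated update example now exercises isolation_mode instead of comment. A minimal end-to-end sketch combining those exact calls (the try/finally wrapper is an addition here, not part of the generated docs):

    import time

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import catalog

    w = WorkspaceClient()

    # Create a throwaway catalog, flip it to ISOLATED, and always clean up.
    created = w.catalogs.create(name=f"sdk-{time.time_ns()}")
    try:
        w.catalogs.update(name=created.name, isolation_mode=catalog.CatalogIsolationMode.ISOLATED)
    finally:
        w.catalogs.delete(name=created.name, force=True)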
diff --git a/docs/workspace/catalog/external_locations.rst b/docs/workspace/catalog/external_locations.rst
index 045ab59e6..5591a7aa7 100644
--- a/docs/workspace/catalog/external_locations.rst
+++ b/docs/workspace/catalog/external_locations.rst
@@ -30,20 +30,22 @@

             w = WorkspaceClient()

-            credential = w.storage_credentials.create(
+            storage_credential = w.storage_credentials.create(
                 name=f"sdk-{time.time_ns()}",
-                aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+                aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+                comment="created via SDK",
             )

-            created = w.external_locations.create(
+            external_location = w.external_locations.create(
                 name=f"sdk-{time.time_ns()}",
-                credential_name=credential.name,
-                url=f's3://{os.environ["TEST_BUCKET"]}/sdk-{time.time_ns()}',
+                credential_name=storage_credential.name,
+                comment="created via SDK",
+                url="s3://" + os.environ["TEST_BUCKET"] + "/" + f"sdk-{time.time_ns()}",
             )

             # cleanup
-            w.storage_credentials.delete(delete=credential.name)
-            w.external_locations.delete(delete=created.name)
+            w.storage_credentials.delete(name=storage_credential.name)
+            w.external_locations.delete(name=external_location.name)

         Creates a new external location entry in the metastore. The caller must be a metastore admin or have
         the **CREATE_EXTERNAL_LOCATION** privilege on both the metastore and the associated storage
@@ -140,11 +142,10 @@
         .. code-block::

             from databricks.sdk import WorkspaceClient
-            from databricks.sdk.service import catalog

             w = WorkspaceClient()

-            all = w.external_locations.list(catalog.ListExternalLocationsRequest())
+            all = w.external_locations.list()

         Gets an array of external locations (__ExternalLocationInfo__ objects) from the metastore. The caller
         must be a metastore admin, the owner of the external location, or a user that has some privilege on
diff --git a/docs/workspace/catalog/schemas.rst b/docs/workspace/catalog/schemas.rst
index fd1479c78..719d5a156 100644
--- a/docs/workspace/catalog/schemas.rst
+++ b/docs/workspace/catalog/schemas.rst
@@ -22,13 +22,13 @@

             w = WorkspaceClient()

-            new_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}")
+            created_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}")

-            created = w.schemas.create(name=f"sdk-{time.time_ns()}", catalog_name=new_catalog.name)
+            created_schema = w.schemas.create(name=f"sdk-{time.time_ns()}", catalog_name=created_catalog.name)

             # cleanup
-            w.catalogs.delete(name=new_catalog.name, force=True)
-            w.schemas.delete(full_name=created.full_name)
+            w.catalogs.delete(name=created_catalog.name, force=True)
+            w.schemas.delete(full_name=created_schema.full_name)

         Creates a new schema for catalog in the Metastore. The caller must be a metastore admin, or have the
         **CREATE_SCHEMA** privilege in the parent catalog.
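Note the dependency chain in the regenerated external-location example: the location references the storage credential by credential_name, so a teardown that drops the location before the credential avoids dangling references. A minimal sketch, assuming the same TEST_METASTORE_DATA_ACCESS_ARN and TEST_BUCKET environment variables used above:

    import os
    import time

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import catalog

    w = WorkspaceClient()

    # A storage credential must exist before an external location can point at it.
    cred = w.storage_credentials.create(
        name=f"sdk-{time.time_ns()}",
        aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
    )
    loc = w.external_locations.create(
        name=f"sdk-{time.time_ns()}",
        credential_name=cred.name,
        url="s3://" + os.environ["TEST_BUCKET"] + "/" + f"sdk-{time.time_ns()}",
    )

    # cleanup: delete the dependent location first, then the credential
    w.external_locations.delete(name=loc.name)
    w.storage_credentials.delete(name=cred.name)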
diff --git a/docs/workspace/catalog/storage_credentials.rst b/docs/workspace/catalog/storage_credentials.rst
index fda14984e..c3c990e99 100644
--- a/docs/workspace/catalog/storage_credentials.rst
+++ b/docs/workspace/catalog/storage_credentials.rst
@@ -30,13 +30,14 @@

             w = WorkspaceClient()

-            credential = w.storage_credentials.create(
+            storage_credential = w.storage_credentials.create(
                 name=f"sdk-{time.time_ns()}",
-                aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+                aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+                comment="created via SDK",
             )

             # cleanup
-            w.storage_credentials.delete(delete=credential.name)
+            w.storage_credentials.delete(name=storage_credential.name)

         Creates a new storage credential.

@@ -172,17 +173,17 @@

             created = w.storage_credentials.create(
                 name=f"sdk-{time.time_ns()}",
-                aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+                aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
             )

             _ = w.storage_credentials.update(
                 name=created.name,
                 comment=f"sdk-{time.time_ns()}",
-                aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
+                aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]),
             )

             # cleanup
-            w.storage_credentials.delete(delete=created.name)
+            w.storage_credentials.delete(name=created.name)

         Updates a storage credential on the metastore.
diff --git a/docs/workspace/iam/current_user.rst b/docs/workspace/iam/current_user.rst
index 2f95213e2..b2390ce63 100644
--- a/docs/workspace/iam/current_user.rst
+++ b/docs/workspace/iam/current_user.rst
@@ -17,7 +17,7 @@

             w = WorkspaceClient()

-            me2 = w.current_user.me()
+            me = w.current_user.me()

         Get details about the current method caller's identity.

diff --git a/docs/workspace/iam/permissions.rst b/docs/workspace/iam/permissions.rst
index 15524c53e..ea24afd1a 100644
--- a/docs/workspace/iam/permissions.rst
+++ b/docs/workspace/iam/permissions.rst
@@ -44,7 +44,7 @@

             obj = w.workspace.get_status(path=notebook_path)

-            levels = w.permissions.get_permission_levels(request_object_type="notebooks", request_object_id="%d" % (obj.object_id))
+            _ = w.permissions.get(request_object_type="notebooks", request_object_id="%d" % (obj.object_id))

         Gets the permissions of an object. Objects can inherit permissions from their parent objects or root
         object.
diff --git a/docs/workspace/jobs/jobs.rst b/docs/workspace/jobs/jobs.rst
index 49eee71d9..aea2acd46 100644
--- a/docs/workspace/jobs/jobs.rst
+++ b/docs/workspace/jobs/jobs.rst
@@ -522,37 +522,11 @@

         .. code-block::

-            import os
-            import time
-
             from databricks.sdk import WorkspaceClient
-            from databricks.sdk.service import jobs

             w = WorkspaceClient()

-            notebook_path = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"
-
-            cluster_id = (
-                w.clusters.ensure_cluster_is_running(os.environ["DATABRICKS_CLUSTER_ID"]) and os.environ["DATABRICKS_CLUSTER_ID"]
-            )
-
-            created_job = w.jobs.create(
-                name=f"sdk-{time.time_ns()}",
-                tasks=[
-                    jobs.Task(
-                        description="test",
-                        existing_cluster_id=cluster_id,
-                        notebook_task=jobs.NotebookTask(notebook_path=notebook_path),
-                        task_key="test",
-                        timeout_seconds=0,
-                    )
-                ],
-            )
-
-            run_list = w.jobs.list_runs(job_id=created_job.job_id)
-
-            # cleanup
-            w.jobs.delete(job_id=created_job.job_id)
+            job_list = w.jobs.list(expand_tasks=False)

         List jobs.
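The simplified jobs example relies on w.jobs.list() returning a paginated iterator rather than a materialized list, so results are fetched page by page as they are consumed. A short sketch of consuming it lazily (the "sdk-" name filter is illustrative only):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Iteration requests pages on demand; breaking out of the loop early
    # avoids fetching the remaining pages.
    for job in w.jobs.list(expand_tasks=False):
        if job.settings and job.settings.name and job.settings.name.startswith("sdk-"):
            print(job.job_id, job.settings.name)
            break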
diff --git a/docs/workspace/ml/model_registry.rst b/docs/workspace/ml/model_registry.rst
index 3b4d11217..9a6c8f286 100644
--- a/docs/workspace/ml/model_registry.rst
+++ b/docs/workspace/ml/model_registry.rst
@@ -91,8 +91,6 @@
             w = WorkspaceClient()

             model = w.model_registry.create_model(name=f"sdk-{time.time_ns()}")
-
-            mv = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp")

         Creates a new registered model with the name specified in the request body. Throws
         `RESOURCE_ALREADY_EXISTS` if a registered model with the given name exists.
@@ -122,7 +120,7 @@

             model = w.model_registry.create_model(name=f"sdk-{time.time_ns()}")

-            mv = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp")
+            created = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp")

         Creates a model version.

@@ -736,14 +734,13 @@

             w = WorkspaceClient()

-            model = w.model_registry.create_model(name=f"sdk-{time.time_ns()}")
+            created = w.model_registry.create_model(name=f"sdk-{time.time_ns()}")

-            created = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp")
+            model = w.model_registry.get_model(name=created.registered_model.name)

-            w.model_registry.update_model_version(
+            w.model_registry.update_model(
+                name=model.registered_model_databricks.name,
                 description=f"sdk-{time.time_ns()}",
-                name=created.model_version.name,
-                version=created.model_version.version,
             )

         Updates a registered model.
diff --git a/docs/workspace/workspace/workspace.rst b/docs/workspace/workspace/workspace.rst
index 7303aa39d..d4136c637 100644
--- a/docs/workspace/workspace/workspace.rst
+++ b/docs/workspace/workspace/workspace.rst
@@ -79,7 +79,7 @@

             notebook = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"

-            export_response = w.workspace.export_(format=workspace.ExportFormat.SOURCE, path=notebook)
+            export_response = w.workspace.export(format=workspace.ExportFormat.SOURCE, path=notebook)

         Exports an object or the contents of an entire directory.

@@ -176,19 +176,14 @@

             w = WorkspaceClient()

-            notebook_path = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"
+            notebook = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"

             w.workspace.import_(
-                path=notebook_path,
-                overwrite=True,
+                path=notebook,
                 format=workspace.ImportFormat.SOURCE,
                 language=workspace.Language.PYTHON,
-                content=base64.b64encode(
-                    (
-                        """print(1)
-                        """
-                    ).encode()
-                ).decode(),
+                content=base64.b64encode(("# Databricks notebook source\nprint('hello from job')").encode()).decode(),
+                overwrite=True,
             )

         Imports a workspace object (for example, a notebook or file) or the contents of an entire directory.

@@ -232,16 +227,14 @@

         .. code-block::

-            import os
-            import time
-
             from databricks.sdk import WorkspaceClient

             w = WorkspaceClient()

-            notebook = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"
-
-            objects = w.workspace.list(path=os.path.dirname(notebook))
+            names = []
+            for i in w.workspace.list(f"/Users/{w.current_user.me().user_name}", recursive=True):
+                names.append(i.path)
+            assert len(names) > 0

         List workspace objects
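Both import_ and export move notebook content as base64-encoded strings, which the regenerated examples encode and decode inline. A compact round-trip sketch under the same assumptions as the examples above (a writable user home folder; the notebook payload is illustrative):

    import base64
    import time

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import workspace

    w = WorkspaceClient()

    path = f"/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}"

    # import_ expects base64-encoded source ...
    w.workspace.import_(
        path=path,
        format=workspace.ImportFormat.SOURCE,
        language=workspace.Language.PYTHON,
        content=base64.b64encode(b"# Databricks notebook source\nprint(1)").decode(),
        overwrite=True,
    )

    # ... and export returns base64 that must be decoded back.
    exported = w.workspace.export(path=path, format=workspace.ExportFormat.SOURCE)
    print(base64.b64decode(exported.content).decode())

    # cleanup
    w.workspace.delete(path=path)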